diff --git a/build/common_virtualenv_packages.txt b/build/common_virtualenv_packages.txt index 80177f9aeb00..6401d3311f07 100644 --- a/build/common_virtualenv_packages.txt +++ b/build/common_virtualenv_packages.txt @@ -54,9 +54,7 @@ pth:testing/web-platform/tests/tools/third_party/webencodings pth:testing/web-platform/tests/tools/wptserve pth:testing/web-platform/tests/tools/wptrunner pth:testing/xpcshell -pth:third_party/python/aiohttp pth:third_party/python/appdirs -pth:third_party/python/async_timeout pth:third_party/python/atomicwrites pth:third_party/python/attrs pth:third_party/python/blessings @@ -76,7 +74,6 @@ pth:third_party/python/fluent.syntax pth:third_party/python/funcsigs pth:third_party/python/gyp/pylib pth:third_party/python/idna -pth:third_party/python/idna-ssl pth:third_party/python/importlib_metadata pth:third_party/python/iso8601 pth:third_party/python/Jinja2 @@ -87,17 +84,15 @@ pth:third_party/python/MarkupSafe/src pth:third_party/python/mohawk pth:third_party/python/more_itertools pth:third_party/python/mozilla_version -pth:third_party/python/multidict -pth:third_party/python/packaging pth:third_party/python/pathspec +pth:third_party/python/pep487/lib pth:third_party/python/pip_tools pth:third_party/python/pluggy pth:third_party/python/ply pth:third_party/python/py pth:third_party/python/pyasn1 -pth:third_party/python/pyasn1_modules +pth:third_party/python/pyasn1-modules pth:third_party/python/pylru -pth:third_party/python/pyparsing pth:third_party/python/pyrsistent pth:third_party/python/pystache pth:third_party/python/pytest @@ -106,19 +101,18 @@ pth:third_party/python/pytoml pth:third_party/python/PyYAML/lib3/ pth:third_party/python/redo pth:third_party/python/requests -pth:third_party/python/requests_unixsocket +pth:third_party/python/requests-unixsocket pth:third_party/python/responses pth:third_party/python/rsa pth:third_party/python/sentry_sdk pth:third_party/python/six pth:third_party/python/slugid pth:third_party/python/taskcluster -pth:third_party/python/taskcluster_urls +pth:third_party/python/taskcluster-urls pth:third_party/python/typing_extensions pth:third_party/python/urllib3 pth:third_party/python/voluptuous pth:third_party/python/yamllint -pth:third_party/python/yarl pth:third_party/python/zipp pth:toolkit/components/telemetry/tests/marionette/harness pth:tools diff --git a/python/mozbuild/mozbuild/vendor/vendor_python.py b/python/mozbuild/mozbuild/vendor/vendor_python.py index b2274603e703..f730baf34948 100644 --- a/python/mozbuild/mozbuild/vendor/vendor_python.py +++ b/python/mozbuild/mozbuild/vendor/vendor_python.py @@ -7,7 +7,6 @@ from __future__ import absolute_import, print_function, unicode_literals import os import shutil import subprocess -from pathlib import Path import mozfile import mozpack.path as mozpath @@ -32,11 +31,6 @@ class VendorPython(MozbuildObject): shutil.copyfile(spec, tmpspec_absolute) self._update_packages(tmpspec_absolute) - tmp_requirements_absolute = os.path.join(spec_dir, "requirements.txt") - # Copy the existing "requirements.txt" file so that the versions - # of transitive dependencies aren't implicitly changed. 
- shutil.copy(requirements, tmp_requirements_absolute) - # resolve the dependencies and update requirements.txt subprocess.check_output( [ @@ -48,7 +42,7 @@ class VendorPython(MozbuildObject): "--no-header", "--no-index", "--output-file", - tmp_requirements_absolute, + requirements, "--generate-hashes", ], # Run pip-compile from within the temporary directory so that the "via" @@ -62,7 +56,7 @@ class VendorPython(MozbuildObject): [ "download", "-r", - tmp_requirements_absolute, + requirements, "--no-deps", "--dest", tmp, @@ -72,11 +66,9 @@ class VendorPython(MozbuildObject): "any", ] ) - _purge_vendor_dir(vendor_dir) self._extract(tmp, vendor_dir, keep_extra_files) shutil.copyfile(tmpspec_absolute, spec) - shutil.copy(tmp_requirements_absolute, requirements) self.repository.add_remove_files(vendor_dir) def _update_packages(self, spec): @@ -103,7 +95,7 @@ class VendorPython(MozbuildObject): ignore = () if not keep_extra_files: - ignore = ("*/doc", "*/docs", "*/test", "*/tests", "**/.git") + ignore = ("*/doc", "*/docs", "*/test", "*/tests") finder = FileFinder(src) for archive, _ in finder.find("*"): _, ext = os.path.splitext(archive) @@ -115,6 +107,7 @@ class VendorPython(MozbuildObject): "-", 4 ) target_package_dir = os.path.join(dest, package_name) + mozfile.remove(target_package_dir) os.mkdir(target_package_dir) # Extract all the contents of the wheel into the package subdirectory. @@ -128,6 +121,7 @@ class VendorPython(MozbuildObject): # specifier. package_name, archive_postfix = archive.rsplit("-", 1) package_dir = os.path.join(dest, package_name) + mozfile.remove(package_dir) # The archive should only contain one top-level directory, which has # the source files. We extract this directory directly to @@ -142,25 +136,6 @@ class VendorPython(MozbuildObject): _denormalize_symlinks(package_dir) -def _purge_vendor_dir(vendor_dir): - excluded_packages = [ - # dlmanager's package on PyPI only has metadata, but is missing the code. - # https://github.com/parkouss/dlmanager/issues/1 - "dlmanager", - # gyp's package on PyPI doesn't have any downloadable files. - "gyp", - # We manage installing "virtualenv" package manually, and we have a custom - # "virtualenv.py" entry module. - "virtualenv", - # The moz.build file isn't a vendored module, so don't delete it. - "moz.build", - ] - - for child in Path(vendor_dir).iterdir(): - if child.name not in excluded_packages: - mozfile.remove(str(child)) - - def _denormalize_symlinks(target): # If any files inside the vendored package were symlinks, turn them into normal files # because hg.mozilla.org forbids symlinks in the repository. diff --git a/third_party/python/aiohttp/CHANGES.rst b/third_party/python/aiohttp/CHANGES.rst deleted file mode 100644 index f064f4895ce2..000000000000 --- a/third_party/python/aiohttp/CHANGES.rst +++ /dev/null @@ -1,728 +0,0 @@ -========= -Changelog -========= - -.. - You should *NOT* be adding new change log entries to this file, this - file is managed by towncrier. You *may* edit previous change logs to - fix problems like typo corrections or such. - To add a new change log entry, please see - https://pip.pypa.io/en/latest/development/#adding-a-news-entry - we named the news folder "changes". - - WARNING: Don't drop the next directive! - -.. towncrier release notes start - -3.7.4.post0 (2021-03-06) -======================== - -Misc ----- - -- Bumped upper bound of the ``chardet`` runtime dependency - to allow their v4.0 version stream. 
- `#5366 `_ - - ----- - - -3.7.4 (2021-02-25) -================== - -Bugfixes --------- - -- **(SECURITY BUG)** Started preventing open redirects in the - ``aiohttp.web.normalize_path_middleware`` middleware. For - more details, see - https://github.com/aio-libs/aiohttp/security/advisories/GHSA-v6wp-4m6f-gcjg. - - Thanks to `Beast Glatisant `__ for - finding the first instance of this issue and `Jelmer Vernooij - `__ for reporting and tracking it down - in aiohttp. - `#5497 `_ -- Fix interpretation difference of the pure-Python and the Cython-based - HTTP parsers construct a ``yarl.URL`` object for HTTP request-target. - - Before this fix, the Python parser would turn the URI's absolute-path - for ``//some-path`` into ``/`` while the Cython code preserved it as - ``//some-path``. Now, both do the latter. - `#5498 `_ - - ----- - - -3.7.3 (2020-11-18) -================== - -Features --------- - -- Use Brotli instead of brotlipy - `#3803 `_ -- Made exceptions pickleable. Also changed the repr of some exceptions. - `#4077 `_ - - -Bugfixes --------- - -- Raise a ClientResponseError instead of an AssertionError for a blank - HTTP Reason Phrase. - `#3532 `_ -- Fix ``web_middlewares.normalize_path_middleware`` behavior for patch without slash. - `#3669 `_ -- Fix overshadowing of overlapped sub-applications prefixes. - `#3701 `_ -- Make `BaseConnector.close()` a coroutine and wait until the client closes all connections. Drop deprecated "with Connector():" syntax. - `#3736 `_ -- Reset the ``sock_read`` timeout each time data is received for a ``aiohttp.client`` response. - `#3808 `_ -- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of View - `#3880 `_ -- Fixed querying the address families from DNS that the current host supports. - `#5156 `_ -- Change return type of MultipartReader.__aiter__() and BodyPartReader.__aiter__() to AsyncIterator. - `#5163 `_ -- Provide x86 Windows wheels. - `#5230 `_ - - -Improved Documentation ----------------------- - -- Add documentation for ``aiohttp.web.FileResponse``. - `#3958 `_ -- Removed deprecation warning in tracing example docs - `#3964 `_ -- Fixed wrong "Usage" docstring of ``aiohttp.client.request``. - `#4603 `_ -- Add aiohttp-pydantic to third party libraries - `#5228 `_ - - -Misc ----- - -- `#4102 `_ - - ----- - - -3.7.2 (2020-10-27) -================== - -Bugfixes --------- - -- Fixed static files handling for loops without ``.sendfile()`` support - `#5149 `_ - - ----- - - -3.7.1 (2020-10-25) -================== - -Bugfixes --------- - -- Fixed a type error caused by the conditional import of `Protocol`. - `#5111 `_ -- Server doesn't send Content-Length for 1xx or 204 - `#4901 `_ -- Fix run_app typing - `#4957 `_ -- Always require ``typing_extensions`` library. - `#5107 `_ -- Fix a variable-shadowing bug causing `ThreadedResolver.resolve` to - return the resolved IP as the ``hostname`` in each record, which prevented - validation of HTTPS connections. - `#5110 `_ -- Added annotations to all public attributes. - `#5115 `_ -- Fix flaky test_when_timeout_smaller_second - `#5116 `_ -- Ensure sending a zero byte file does not throw an exception - `#5124 `_ -- Fix a bug in ``web.run_app()`` about Python version checking on Windows - `#5127 `_ - - ----- - - -3.7.0 (2020-10-24) -================== - -Features --------- - -- Response headers are now prepared prior to running ``on_response_prepare`` hooks, directly before headers are sent to the client. 
- `#1958 `_ -- Add a ``quote_cookie`` option to ``CookieJar``, a way to skip quotation wrapping of cookies containing special characters. - `#2571 `_ -- Call ``AccessLogger.log`` with the current exception available from ``sys.exc_info()``. - `#3557 `_ -- `web.UrlDispatcher.add_routes` and `web.Application.add_routes` return a list - of registered `AbstractRoute` instances. `AbstractRouteDef.register` (and all - subclasses) return a list of registered resources registered resource. - `#3866 `_ -- Added properties of default ClientSession params to ClientSession class so it is available for introspection - `#3882 `_ -- Don't cancel web handler on peer disconnection, raise `OSError` on reading/writing instead. - `#4080 `_ -- Implement BaseRequest.get_extra_info() to access a protocol transports' extra info. - `#4189 `_ -- Added `ClientSession.timeout` property. - `#4191 `_ -- allow use of SameSite in cookies. - `#4224 `_ -- Use ``loop.sendfile()`` instead of custom implementation if available. - `#4269 `_ -- Apply SO_REUSEADDR to test server's socket. - `#4393 `_ -- Use .raw_host instead of slower .host in client API - `#4402 `_ -- Allow configuring the buffer size of input stream by passing ``read_bufsize`` argument. - `#4453 `_ -- Pass tests on Python 3.8 for Windows. - `#4513 `_ -- Add `method` and `url` attributes to `TraceRequestChunkSentParams` and `TraceResponseChunkReceivedParams`. - `#4674 `_ -- Add ClientResponse.ok property for checking status code under 400. - `#4711 `_ -- Don't ceil timeouts that are smaller than 5 seconds. - `#4850 `_ -- TCPSite now listens by default on all interfaces instead of just IPv4 when `None` is passed in as the host. - `#4894 `_ -- Bump ``http_parser`` to 2.9.4 - `#5070 `_ - - -Bugfixes --------- - -- Fix keepalive connections not being closed in time - `#3296 `_ -- Fix failed websocket handshake leaving connection hanging. - `#3380 `_ -- Fix tasks cancellation order on exit. The run_app task needs to be cancelled first for cleanup hooks to run with all tasks intact. - `#3805 `_ -- Don't start heartbeat until _writer is set - `#4062 `_ -- Fix handling of multipart file uploads without a content type. - `#4089 `_ -- Preserve view handler function attributes across middlewares - `#4174 `_ -- Fix the string representation of ``ServerDisconnectedError``. - `#4175 `_ -- Raising RuntimeError when trying to get encoding from not read body - `#4214 `_ -- Remove warning messages from noop. - `#4282 `_ -- Raise ClientPayloadError if FormData re-processed. - `#4345 `_ -- Fix a warning about unfinished task in ``web_protocol.py`` - `#4408 `_ -- Fixed 'deflate' compression. According to RFC 2616 now. - `#4506 `_ -- Fixed OverflowError on platforms with 32-bit time_t - `#4515 `_ -- Fixed request.body_exists returns wrong value for methods without body. - `#4528 `_ -- Fix connecting to link-local IPv6 addresses. - `#4554 `_ -- Fix a problem with connection waiters that are never awaited. - `#4562 `_ -- Always make sure transport is not closing before reuse a connection. - - Reuse a protocol based on keepalive in headers is unreliable. - For example, uWSGI will not support keepalive even it serves a - HTTP 1.1 request, except explicitly configure uWSGI with a - ``--http-keepalive`` option. - - Servers designed like uWSGI could cause aiohttp intermittently - raise a ConnectionResetException when the protocol poll runs - out and some protocol is reused. - `#4587 `_ -- Handle the last CRLF correctly even if it is received via separate TCP segment. 
- `#4630 `_ -- Fix the register_resource function to validate route name before splitting it so that route name can include python keywords. - `#4691 `_ -- Improve typing annotations for ``web.Request``, ``aiohttp.ClientResponse`` and - ``multipart`` module. - `#4736 `_ -- Fix resolver task is not awaited when connector is cancelled - `#4795 `_ -- Fix a bug "Aiohttp doesn't return any error on invalid request methods" - `#4798 `_ -- Fix HEAD requests for static content. - `#4809 `_ -- Fix incorrect size calculation for memoryview - `#4890 `_ -- Add HTTPMove to _all__. - `#4897 `_ -- Fixed the type annotations in the ``tracing`` module. - `#4912 `_ -- Fix typing for multipart ``__aiter__``. - `#4931 `_ -- Fix for race condition on connections in BaseConnector that leads to exceeding the connection limit. - `#4936 `_ -- Add forced UTF-8 encoding for ``application/rdap+json`` responses. - `#4938 `_ -- Fix inconsistency between Python and C http request parsers in parsing pct-encoded URL. - `#4972 `_ -- Fix connection closing issue in HEAD request. - `#5012 `_ -- Fix type hint on BaseRunner.addresses (from ``List[str]`` to ``List[Any]``) - `#5086 `_ -- Make `web.run_app()` more responsive to Ctrl+C on Windows for Python < 3.8. It slightly - increases CPU load as a side effect. - `#5098 `_ - - -Improved Documentation ----------------------- - -- Fix example code in client quick-start - `#3376 `_ -- Updated the docs so there is no contradiction in ``ttl_dns_cache`` default value - `#3512 `_ -- Add 'Deploy with SSL' to docs. - `#4201 `_ -- Change typing of the secure argument on StreamResponse.set_cookie from ``Optional[str]`` to ``Optional[bool]`` - `#4204 `_ -- Changes ``ttl_dns_cache`` type from int to Optional[int]. - `#4270 `_ -- Simplify README hello word example and add a documentation page for people coming from requests. - `#4272 `_ -- Improve some code examples in the documentation involving websockets and starting a simple HTTP site with an AppRunner. - `#4285 `_ -- Fix typo in code example in Multipart docs - `#4312 `_ -- Fix code example in Multipart section. - `#4314 `_ -- Update contributing guide so new contributors read the most recent version of that guide. Update command used to create test coverage reporting. - `#4810 `_ -- Spelling: Change "canonize" to "canonicalize". - `#4986 `_ -- Add ``aiohttp-sse-client`` library to third party usage list. - `#5084 `_ - - -Misc ----- - -- `#2856 `_, `#4218 `_, `#4250 `_ - - ----- - - -3.6.3 (2020-10-12) -================== - -Bugfixes --------- - -- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp - release. - -3.6.2 (2019-10-09) -================== - -Features --------- - -- Made exceptions pickleable. Also changed the repr of some exceptions. - `#4077 `_ -- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware* - parameter. `#4125 `_ - - -Bugfixes --------- - -- Reset the ``sock_read`` timeout each time data is received for a - ``aiohttp.ClientResponse``. `#3808 - `_ -- Fix handling of expired cookies so they are not stored in CookieJar. - `#4063 `_ -- Fix misleading message in the string representation of ``ClientConnectorError``; - ``self.ssl == None`` means default SSL context, not SSL disabled `#4097 - `_ -- Don't clobber HTTP status when using FileResponse. - `#4106 `_ - - -Improved Documentation ----------------------- - -- Added minimal required logging configuration to logging documentation. - `#2469 `_ -- Update docs to reflect proxy support. 
- `#4100 `_ -- Fix typo in code example in testing docs. - `#4108 `_ - - -Misc ----- - -- `#4102 `_ - - ----- - - -3.6.1 (2019-09-19) -================== - -Features --------- - -- Compatibility with Python 3.8. - `#4056 `_ - - -Bugfixes --------- - -- correct some exception string format - `#4068 `_ -- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is - unavailable because the runtime is built against - an outdated OpenSSL. - `#4052 `_ -- Update multidict requirement to >= 4.5 - `#4057 `_ - - -Improved Documentation ----------------------- - -- Provide pytest-aiohttp namespace for pytest fixtures in docs. - `#3723 `_ - - ----- - - -3.6.0 (2019-09-06) -================== - -Features --------- - -- Add support for Named Pipes (Site and Connector) under Windows. This feature requires - Proactor event loop to work. `#3629 - `_ -- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be - compatible with more http proxy servers. `#3798 - `_ -- Accept non-GET request for starting websocket handshake on server side. - `#3980 `_ - - -Bugfixes --------- - -- Raise a ClientResponseError instead of an AssertionError for a blank - HTTP Reason Phrase. - `#3532 `_ -- Fix an issue where cookies would sometimes not be set during a redirect. - `#3576 `_ -- Change normalize_path_middleware to use 308 redirect instead of 301. - - This behavior should prevent clients from being unable to use PUT/POST - methods on endpoints that are redirected because of a trailing slash. - `#3579 `_ -- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a - task with unhandled exception when the server is used in conjunction with - ``asyncio.run()``. `#3587 `_ -- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'], - Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``. - `#3595 `_ -- Use sanitized URL as Location header in redirects - `#3614 `_ -- Improve typing annotations for multipart.py along with changes required - by mypy in files that references multipart.py. - `#3621 `_ -- Close session created inside ``aiohttp.request`` when unhandled exception occurs - `#3628 `_ -- Cleanup per-chunk data in generic data read. Memory leak fixed. - `#3631 `_ -- Use correct type for add_view and family - `#3633 `_ -- Fix _keepalive field in __slots__ of ``RequestHandler``. - `#3644 `_ -- Properly handle ConnectionResetError, to silence the "Cannot write to closing - transport" exception when clients disconnect uncleanly. - `#3648 `_ -- Suppress pytest warnings due to ``test_utils`` classes - `#3660 `_ -- Fix overshadowing of overlapped sub-application prefixes. - `#3701 `_ -- Fixed return type annotation for WSMessage.json() - `#3720 `_ -- Properly expose TooManyRedirects publicly as documented. - `#3818 `_ -- Fix missing brackets for IPv6 in proxy CONNECT request - `#3841 `_ -- Make the signature of ``aiohttp.test_utils.TestClient.request`` match - ``asyncio.ClientSession.request`` according to the docs `#3852 - `_ -- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy. - `#3868 `_ -- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of - View `#3880 `_ -- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is - missing. `#3906 `_ -- Add URL to the string representation of ClientResponseError. - `#3959 `_ -- Accept ``istr`` keys in ``LooseHeaders`` type hints. 
- `#3976 `_ -- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled. - `#4013 `_ -- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of - "localhost:None". `#4039 `_ - - -Improved Documentation ----------------------- - -- Modify documentation for Background Tasks to remove deprecated usage of event loop. - `#3526 `_ -- use ``if __name__ == '__main__':`` in server examples. - `#3775 `_ -- Update documentation reference to the default access logger. - `#3783 `_ -- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``. - `#3791 `_ -- Removed deprecation warning in tracing example docs - `#3964 `_ - - ----- - - -3.5.4 (2019-01-12) -================== - -Bugfixes --------- - -- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a - partial content only in case of compressed content - `#3525 `_ - - -3.5.3 (2019-01-10) -================== - -Bugfixes --------- - -- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of - ``access_log=True`` and the event loop being in debug mode. `#3504 - `_ -- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields - `#3511 `_ -- Send custom per-request cookies even if session jar is empty - `#3515 `_ -- Restore Linux binary wheels publishing on PyPI - ----- - - -3.5.2 (2019-01-08) -================== - -Features --------- - -- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work - with files asynchronously. I/O based payloads from ``payload.py`` uses a - ``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313 - `_ -- Internal Server Errors in plain text if the browser does not support HTML. - `#3483 `_ - - -Bugfixes --------- - -- Preserve MultipartWriter parts headers on write. Refactor the way how - ``Payload.headers`` are handled. Payload instances now always have headers and - Content-Type defined. Fix Payload Content-Disposition header reset after initial - creation. `#3035 `_ -- Log suppressed exceptions in ``GunicornWebWorker``. - `#3464 `_ -- Remove wildcard imports. - `#3468 `_ -- Use the same task for app initialization and web server handling in gunicorn workers. - It allows to use Python3.7 context vars smoothly. - `#3471 `_ -- Fix handling of chunked+gzipped response when first chunk does not give uncompressed - data `#3477 `_ -- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to - avoid a deprecation warning. `#3480 - `_ -- ``Payload.size`` type annotation changed from ``Optional[float]`` to - ``Optional[int]``. `#3484 `_ -- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization. - `#3497 `_ - - -Improved Documentation ----------------------- - -- Add documentation for ``aiohttp.web.HTTPException``. - `#3490 `_ - - -Misc ----- - -- `#3487 `_ - - ----- - - -3.5.1 (2018-12-24) -==================== - -- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug - mode. - -3.5.0 (2018-12-22) -==================== - -Features --------- - -- The library type annotations are checked in strict mode now. -- Add support for setting cookies for individual request (`#2387 - `_) -- Application.add_domain implementation (`#2809 - `_) -- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can - now have objects assigned to it and retrieved using the ``[]`` operator. 
(`#3174 - `_) -- Make ``request.url`` accessible when transport is closed. (`#3177 - `_) -- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression - to run in a background executor to avoid blocking the main thread and potentially - triggering health check failures. (`#3205 - `_) -- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213 - `_) -- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc`` - file doesn't exist. (`#3267 `_) -- Add default logging handler to web.run_app If the ``Application.debug``` flag is set - and the default logger ``aiohttp.access`` is used, access logs will now be output - using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the - default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324 - `_) -- Add method argument to ``session.ws_connect()``. Sometimes server API requires a - different HTTP method for WebSocket connection establishment. For example, ``Docker - exec`` needs POST. (`#3378 `_) -- Create a task per request handling. (`#3406 - `_) - - -Bugfixes --------- - -- Enable passing ``access_log_class`` via ``handler_args`` (`#3158 - `_) -- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186 - `_) -- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response`` - constructor. (`#3207 `_) -- Don't uppercase HTTP method in parser (`#3233 - `_) -- Make method match regexp RFC-7230 compliant (`#3235 - `_) -- Add ``app.pre_frozen`` state to properly handle startup signals in - sub-applications. (`#3237 `_) -- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239 - `_) -- Change imports from collections module in preparation for 3.8. (`#3258 - `_) -- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265 - `_) -- Fix forward compatibility with Python 3.8: importing ABCs directly from the - collections module will not be supported anymore. (`#3273 - `_) -- Keep the query string by ``normalize_path_middleware``. (`#3278 - `_) -- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290 - `_) -- Bracket IPv6 addresses in the HOST header (`#3304 - `_) -- Fix default message for server ping and pong frames. (`#3308 - `_) -- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop - def. (`#3337 `_) -- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function - (`#3361 `_) -- Release HTTP response before raising status exception (`#3364 - `_) -- Fix task cancellation when ``sendfile()`` syscall is used by static file - handling. (`#3383 `_) -- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught - in the handler. (`#3414 `_) - - -Improved Documentation ----------------------- - -- Improve documentation of ``Application.make_handler`` parameters. (`#3152 - `_) -- Fix BaseRequest.raw_headers doc. (`#3215 - `_) -- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229 - `_) -- Make server access log format placeholder %b documentation reflect - behavior and docstring. 
(`#3307 `_) - - -Deprecations and Removals -------------------------- - -- Deprecate modification of ``session.requote_redirect_url`` (`#2278 - `_) -- Deprecate ``stream.unread_data()`` (`#3260 - `_) -- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318 - `_) -- Encourage creation of aiohttp public objects inside a coroutine (`#3331 - `_) -- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken - for more than 2 years. (`#3358 `_) -- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop`` - properties. (`#3374 `_) -- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381 - `_) -- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385 - `_) -- Deprecate bare connector close, use ``async with connector:`` and ``await - connector.close()`` instead. (`#3417 - `_) -- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession`` - constructor. (`#3438 `_) - - -Misc ----- - -- #3341, #3351 diff --git a/third_party/python/aiohttp/CONTRIBUTORS.txt b/third_party/python/aiohttp/CONTRIBUTORS.txt deleted file mode 100644 index ad63ce9e4dec..000000000000 --- a/third_party/python/aiohttp/CONTRIBUTORS.txt +++ /dev/null @@ -1,312 +0,0 @@ -- Contributors - ----------------- -A. Jesse Jiryu Davis -Adam Bannister -Adam Cooper -Adam Mills -Adrian Krupa -Adrián Chaves -Alan Tse -Alec Hanefeld -Alejandro Gómez -Aleksandr Danshyn -Aleksey Kutepov -Alex Hayes -Alex Key -Alex Khomchenko -Alex Kuzmenko -Alex Lisovoy -Alexander Bayandin -Alexander Karpinsky -Alexander Koshevoy -Alexander Malev -Alexander Mohr -Alexander Shorin -Alexander Travov -Alexandru Mihai -Alexey Firsov -Alexey Popravka -Alexey Stepanov -Amin Etesamian -Amit Tulshyan -Amy Boyle -Anders Melchiorsen -Andrei Ursulenko -Andrej Antonov -Andrew Leech -Andrew Lytvyn -Andrew Svetlov -Andrew Zhou -Andrii Soldatenko -Antoine Pietri -Anton Kasyanov -Anton Zhdan-Pushkin -Arseny Timoniq -Artem Yushkovskiy -Arthur Darcet -Ben Bader -Ben Timby -Benedikt Reinartz -Boris Feld -Boyi Chen -Brett Cannon -Brian C. Lane -Brian Muller -Bruce Merry -Bryan Kok -Bryce Drennan -Carl George -Cecile Tonglet -Chien-Wei Huang -Chih-Yuan Chen -Chris AtLee -Chris Laws -Chris Moore -Christopher Schmitt -Claudiu Popa -Colin Dunklau -Cong Xu -Damien Nadé -Dan Xu -Daniel García -Daniel Grossmann-Kavanagh -Daniel Nelson -Danny Song -David Bibb -David Michael Brown -Denilson Amorim -Denis Matiychuk -Dennis Kliban -Dima Veselov -Dimitar Dimitrov -Dmitriy Safonov -Dmitry Doroshev -Dmitry Erlikh -Dmitry Lukashin -Dmitry Marakasov -Dmitry Shamov -Dmitry Trofimov -Dmytro Bohomiakov -Dmytro Kuznetsov -Dustin J. Mitchell -Eduard Iskandarov -Eli Ribble -Elizabeth Leddy -Enrique Saez -Eric Sheng -Erich Healy -Erik Peterson -Eugene Chernyshov -Eugene Naydenov -Eugene Nikolaiev -Eugene Tolmachev -Evan Kepner -Evert Lammerts -Felix Yan -Fernanda Guimarães -FichteFoll -Florian Scheffler -Frederik Gladhorn -Frederik Peter Aalund -Gabriel Tremblay -Gary Wilson Jr. -Gennady Andreyev -Georges Dubus -Greg Holt -Gregory Haynes -Gus Goulart -Gustavo Carneiro -Günther Jena -Hans Adema -Harmon Y. 
-Hrishikesh Paranjape -Hu Bo -Hugh Young -Hugo Herter -Hynek Schlawack -Igor Alexandrov -Igor Davydenko -Igor Mozharovsky -Igor Pavlov -Illia Volochii -Ilya Chichak -Ilya Gruzinov -Ingmar Steen -Jacob Champion -Jaesung Lee -Jake Davis -Jakob Ackermann -Jakub Wilk -Jashandeep Sohi -Jens Steinhauser -Jeonghun Lee -Jeongkyu Shin -Jeroen van der Heijden -Jesus Cea -Jian Zeng -Jinkyu Yi -Joel Watts -Jon Nabozny -Jonas Krüger Svensson -Jonas Obrist -Jonathan Wright -Jonny Tan -Joongi Kim -Josep Cugat -Josh Junon -Joshu Coats -Julia Tsemusheva -Julien Duponchelle -Jungkook Park -Junjie Tao -Junyeong Jeong -Justas Trimailovas -Justin Foo -Justin Turner Arthur -Kay Zheng -Kevin Samuel -Kimmo Parviainen-Jalanko -Kirill Klenov -Kirill Malovitsa -Konstantin Valetov -Krzysztof Blazewicz -Kyrylo Perevozchikov -Kyungmin Lee -Lars P. Søndergaard -Liu Hua -Louis-Philippe Huberdeau -Loïc Lajeanne -Lu Gong -Lubomir Gelo -Ludovic Gasc -Luis Pedrosa -Lukasz Marcin Dobrzanski -Makc Belousow -Manuel Miranda -Marat Sharafutdinov -Marco Paolini -Mariano Anaya -Martijn Pieters -Martin Melka -Martin Richard -Mathias Fröjdman -Mathieu Dugré -Matthieu Hauglustaine -Matthieu Rigal -Michael Ihnatenko -Michał Górny -Mikhail Burshteyn -Mikhail Kashkin -Mikhail Lukyanchenko -Mikhail Nacharov -Misha Behersky -Mitchell Ferree -Morgan Delahaye-Prat -Moss Collum -Mun Gwan-gyeong -Navid Sheikhol -Nicolas Braem -Nikolay Kim -Nikolay Novik -Oisin Aylward -Olaf Conradi -Pahaz Blinov -Panagiotis Kolokotronis -Pankaj Pandey -Pau Freixes -Paul Colomiets -Paulius Šileikis -Paulus Schoutsen -Pavel Kamaev -Pavel Polyakov -Pawel Kowalski -Pawel Miech -Pepe Osca -Philipp A. -Pieter van Beek -Rafael Viotti -Raphael Bialon -Raúl Cumplido -Required Field -Robert Lu -Robert Nikolich -Roman Podoliaka -Samuel Colvin -Sean Hunt -Sebastian Acuna -Sebastian Hanula -Sebastian Hüther -Sebastien Geffroy -SeongSoo Cho -Sergey Ninua -Sergey Skripnick -Serhii Charykov -Serhii Kostel -Serhiy Storchaka -Simon Kennedy -Sin-Woo Bang -Stanislas Plum -Stanislav Prokop -Stefan Tjarks -Stepan Pletnev -Stephan Jaensch -Stephen Granade -Steven Seguin -Sunghyun Hwang -Sunit Deshpande -Sviatoslav Bulbakha -Sviatoslav Sydorenko -Taha Jahangir -Taras Voinarovskyi -Terence Honles -Thanos Lefteris -Thijs Vermeir -Thomas Forbes -Thomas Grainger -Tolga Tezel -Tomasz Trebski -Toshiaki Tanaka -Trinh Hoang Nhu -Vadim Suharnikov -Vaibhav Sagar -Vamsi Krishna Avula -Vasiliy Faronov -Vasyl Baran -Viacheslav Greshilov -Victor Collod -Victor Kovtun -Vikas Kawadia -Viktor Danyliuk -Ville Skyttä -Vincent Maillol -Vitalik Verhovodov -Vitaly Haritonsky -Vitaly Magerya -Vladimir Kamarzin -Vladimir Kozlovski -Vladimir Rutsky -Vladimir Shulyak -Vladimir Zakharov -Vladyslav Bohaichuk -Vladyslav Bondar -W. Trevor King -Wei Lin -Weiwei Wang -Will McGugan -Willem de Groot -William Grzybowski -William S. -Wilson Ong -Yang Zhou -Yannick Koechlin -Yannick Péroux -Ye Cao -Yegor Roganov -Yifei Kong -Young-Ho Cha -Yuriy Shatrov -Yury Selivanov -Yusuke Tsutsumi -Zlatan Sičanica -Марк Коренберг -Семён Марьясин diff --git a/third_party/python/aiohttp/LICENSE.txt b/third_party/python/aiohttp/LICENSE.txt deleted file mode 100644 index 90c9d01bc5a1..000000000000 --- a/third_party/python/aiohttp/LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2013-2020 aiohttp maintainers - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/third_party/python/aiohttp/MANIFEST.in b/third_party/python/aiohttp/MANIFEST.in deleted file mode 100644 index 05084efddb98..000000000000 --- a/third_party/python/aiohttp/MANIFEST.in +++ /dev/null @@ -1,20 +0,0 @@ -include LICENSE.txt -include CHANGES.rst -include README.rst -include CONTRIBUTORS.txt -include Makefile -graft aiohttp -graft docs -graft examples -graft tests -recursive-include vendor * -global-include aiohttp *.pyi -global-exclude *.pyc -global-exclude *.pyd -global-exclude *.so -global-exclude *.lib -global-exclude *.dll -global-exclude *.a -global-exclude *.obj -exclude aiohttp/*.html -prune docs/_build diff --git a/third_party/python/aiohttp/Makefile b/third_party/python/aiohttp/Makefile deleted file mode 100644 index 5e4a9eaf116a..000000000000 --- a/third_party/python/aiohttp/Makefile +++ /dev/null @@ -1,144 +0,0 @@ -# Some simple testing tasks (sorry, UNIX only). - -to-hash-one = $(dir $1).hash/$(addsuffix .hash,$(notdir $1)) -to-hash = $(foreach fname,$1,$(call to-hash-one,$(fname))) - -CYS := $(wildcard aiohttp/*.pyx) $(wildcard aiohttp/*.pyi) $(wildcard aiohttp/*.pxd) -PYXS := $(wildcard aiohttp/*.pyx) -CS := $(wildcard aiohttp/*.c) -PYS := $(wildcard aiohttp/*.py) -REQS := $(wildcard requirements/*.txt) -ALLS := $(sort $(CYS) $(CS) $(PYS) $(REQS)) - -.PHONY: all -all: test - -tst: - @echo $(call to-hash,requirements/cython.txt) - @echo $(call to-hash,aiohttp/%.pyx) - - -# Recipe from https://www.cmcrossroads.com/article/rebuilding-when-files-checksum-changes -FORCE: - -# check_sum.py works perfectly fine but slow when called for every file from $(ALLS) -# (perhaps even several times for each file). -# That is why much less readable but faster solution exists -ifneq (, $(shell which sha256sum)) -%.hash: FORCE - $(eval $@_ABS := $(abspath $@)) - $(eval $@_NAME := $($@_ABS)) - $(eval $@_HASHDIR := $(dir $($@_ABS))) - $(eval $@_TMP := $($@_HASHDIR)../$(notdir $($@_ABS))) - $(eval $@_ORIG := $(subst /.hash/../,/,$(basename $($@_TMP)))) - @#echo ==== $($@_ABS) $($@_HASHDIR) $($@_NAME) $($@_TMP) $($@_ORIG) - @if ! (sha256sum --check $($@_ABS) 1>/dev/null 2>/dev/null); then \ - mkdir -p $($@_HASHDIR); \ - echo re-hash $($@_ORIG); \ - sha256sum $($@_ORIG) > $($@_ABS); \ - fi -else -%.hash: FORCE - @./tools/check_sum.py $@ # --debug -endif - -# Enumerate intermediate files to don't remove them automatically. -.SECONDARY: $(call to-hash,$(ALLS)) - - -.install-cython: $(call to-hash,requirements/cython.txt) - pip install -r requirements/cython.txt - @touch .install-cython - -aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py) - ./tools/gen.py - -# _find_headers generator creates _headers.pyi as well -aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c - cython -3 -o $@ $< -I aiohttp - - -.PHONY: cythonize -cythonize: .install-cython $(PYXS:.pyx=.c) - -.install-deps: .install-cython $(PYXS:.pyx=.c) $(call to-hash,$(CYS) $(REQS)) - pip install -r requirements/dev.txt - @touch .install-deps - -.PHONY: lint -lint: fmt mypy - -.PHONY: fmt format -fmt format: - python -m pre_commit run --all-files --show-diff-on-failure - -.PHONY: mypy -mypy: - mypy aiohttp - -.develop: .install-deps $(call to-hash,$(PYS) $(CYS) $(CS)) - pip install -e . - @touch .develop - -.PHONY: test -test: .develop - @pytest -q - -.PHONY: vtest -vtest: .develop - @pytest -s -v - -.PHONY: vvtest -vvtest: .develop - @pytest -vv - -.PHONY: clean -clean: - @rm -rf `find . -name __pycache__` - @rm -rf `find . -name .hash` - @rm -rf `find . 
-name .md5` # old styling - @rm -f `find . -type f -name '*.py[co]' ` - @rm -f `find . -type f -name '*~' ` - @rm -f `find . -type f -name '.*~' ` - @rm -f `find . -type f -name '@*' ` - @rm -f `find . -type f -name '#*#' ` - @rm -f `find . -type f -name '*.orig' ` - @rm -f `find . -type f -name '*.rej' ` - @rm -f `find . -type f -name '*.md5' ` # old styling - @rm -f .coverage - @rm -rf htmlcov - @rm -rf build - @rm -rf cover - @make -C docs clean - @python setup.py clean - @rm -f aiohttp/*.so - @rm -f aiohttp/*.pyd - @rm -f aiohttp/*.html - @rm -f aiohttp/_frozenlist.c - @rm -f aiohttp/_find_header.c - @rm -f aiohttp/_http_parser.c - @rm -f aiohttp/_http_writer.c - @rm -f aiohttp/_websocket.c - @rm -rf .tox - @rm -f .develop - @rm -f .flake - @rm -rf aiohttp.egg-info - @rm -f .install-deps - @rm -f .install-cython - -.PHONY: doc -doc: - @make -C docs html SPHINXOPTS="-W --keep-going -E" - @echo "open file://`pwd`/docs/_build/html/index.html" - -.PHONY: doc-spelling -doc-spelling: - @make -C docs spelling SPHINXOPTS="-W -E" - -.PHONY: install -install: - @pip install -U 'pip' - @pip install -Ur requirements/dev.txt - -.PHONY: install-dev -install-dev: .develop diff --git a/third_party/python/aiohttp/PKG-INFO b/third_party/python/aiohttp/PKG-INFO deleted file mode 100644 index a0c00158c790..000000000000 --- a/third_party/python/aiohttp/PKG-INFO +++ /dev/null @@ -1,966 +0,0 @@ -Metadata-Version: 2.1 -Name: aiohttp -Version: 3.7.4.post0 -Summary: Async http client/server framework (asyncio) -Home-page: https://github.com/aio-libs/aiohttp -Author: Nikolay Kim -Author-email: fafhrd91@gmail.com -Maintainer: Nikolay Kim , Andrew Svetlov -Maintainer-email: aio-libs@googlegroups.com -License: Apache 2 -Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby -Project-URL: CI: Azure Pipelines, https://dev.azure.com/aio-libs/aiohttp/_build -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp -Project-URL: Docs: RTD, https://docs.aiohttp.org -Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp -Description: ================================== - Async http client/server framework - ================================== - - .. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png - :height: 64px - :width: 64px - :alt: aiohttp logo - - | - - .. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg - :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI - :alt: GitHub Actions status for master branch - - .. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/aiohttp - :alt: codecov.io status for master branch - - .. image:: https://badge.fury.io/py/aiohttp.svg - :target: https://pypi.org/project/aiohttp - :alt: Latest PyPI package version - - .. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest - :target: https://docs.aiohttp.org/ - :alt: Latest Read The Docs - - .. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group - :target: https://aio-libs.discourse.group - :alt: Discourse status - - .. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - - - Key Features - ============ - - - Supports both client and server side of HTTP protocol. - - Supports both client and server Web-Sockets out-of-the-box and avoids - Callback Hell. 
- - Provides Web-server with middlewares and plugable routing. - - - Getting started - =============== - - Client - ------ - - To get something from the web: - - .. code-block:: python - - import aiohttp - import asyncio - - async def main(): - - async with aiohttp.ClientSession() as session: - async with session.get('http://python.org') as response: - - print("Status:", response.status) - print("Content-type:", response.headers['content-type']) - - html = await response.text() - print("Body:", html[:15], "...") - - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) - - This prints: - - .. code-block:: - - Status: 200 - Content-type: text/html; charset=utf-8 - Body: ... - - Coming from `requests `_ ? Read `why we need so many lines `_. - - Server - ------ - - An example using a simple server: - - .. code-block:: python - - # examples/server_simple.py - from aiohttp import web - - async def handle(request): - name = request.match_info.get('name', "Anonymous") - text = "Hello, " + name - return web.Response(text=text) - - async def wshandle(request): - ws = web.WebSocketResponse() - await ws.prepare(request) - - async for msg in ws: - if msg.type == web.WSMsgType.text: - await ws.send_str("Hello, {}".format(msg.data)) - elif msg.type == web.WSMsgType.binary: - await ws.send_bytes(msg.data) - elif msg.type == web.WSMsgType.close: - break - - return ws - - - app = web.Application() - app.add_routes([web.get('/', handle), - web.get('/echo', wshandle), - web.get('/{name}', handle)]) - - if __name__ == '__main__': - web.run_app(app) - - - Documentation - ============= - - https://aiohttp.readthedocs.io/ - - - Demos - ===== - - https://github.com/aio-libs/aiohttp-demos - - - External links - ============== - - * `Third party libraries - `_ - * `Built with aiohttp - `_ - * `Powered by aiohttp - `_ - - Feel free to make a Pull Request for adding your link to these pages! - - - Communication channels - ====================== - - *aio-libs discourse group*: https://aio-libs.discourse.group - - *gitter chat* https://gitter.im/aio-libs/Lobby - - We support `Stack Overflow - `_. - Please add *aiohttp* tag to your question there. - - Requirements - ============ - - - Python >= 3.6 - - async-timeout_ - - attrs_ - - chardet_ - - multidict_ - - yarl_ - - Optionally you may install the cChardet_ and aiodns_ libraries (highly - recommended for sake of speed). - - .. _chardet: https://pypi.python.org/pypi/chardet - .. _aiodns: https://pypi.python.org/pypi/aiodns - .. _attrs: https://github.com/python-attrs/attrs - .. _multidict: https://pypi.python.org/pypi/multidict - .. _yarl: https://pypi.python.org/pypi/yarl - .. _async-timeout: https://pypi.python.org/pypi/async_timeout - .. _cChardet: https://pypi.python.org/pypi/cchardet - - License - ======= - - ``aiohttp`` is offered under the Apache 2 license. - - - Keepsafe - ======== - - The aiohttp community would like to thank Keepsafe - (https://www.getkeepsafe.com) for its support in the early days of - the project. - - - Source code - =========== - - The latest developer version is available in a GitHub repository: - https://github.com/aio-libs/aiohttp - - Benchmarks - ========== - - If you are interested in efficiency, the AsyncIO community maintains a - list of benchmarks on the official wiki: - https://github.com/python/asyncio/wiki/Benchmarks - - ========= - Changelog - ========= - - .. - You should *NOT* be adding new change log entries to this file, this - file is managed by towncrier. 
`#4125 `_ - - - Bugfixes - -------- - - - Reset the ``sock_read`` timeout each time data is received for a - ``aiohttp.ClientResponse``. `#3808 - `_ - - Fix handling of expired cookies so they are not stored in CookieJar. - `#4063 `_ - - Fix misleading message in the string representation of ``ClientConnectorError``; - ``self.ssl == None`` means default SSL context, not SSL disabled `#4097 - `_ - - Don't clobber HTTP status when using FileResponse. - `#4106 `_ - - - Improved Documentation - ---------------------- - - - Added minimal required logging configuration to logging documentation. - `#2469 `_ - - Update docs to reflect proxy support. - `#4100 `_ - - Fix typo in code example in testing docs. - `#4108 `_ - - - Misc - ---- - - - `#4102 `_ - - - ---- - - - 3.6.1 (2019-09-19) - ================== - - Features - -------- - - - Compatibility with Python 3.8. - `#4056 `_ - - - Bugfixes - -------- - - - correct some exception string format - `#4068 `_ - - Emit a warning when ``ssl.OP_NO_COMPRESSION`` is - unavailable because the runtime is built against - an outdated OpenSSL. - `#4052 `_ - - Update multidict requirement to >= 4.5 - `#4057 `_ - - - Improved Documentation - ---------------------- - - - Provide pytest-aiohttp namespace for pytest fixtures in docs. - `#3723 `_ - - - ---- - - - 3.6.0 (2019-09-06) - ================== - - Features - -------- - - - Add support for Named Pipes (Site and Connector) under Windows. This feature requires - Proactor event loop to work. `#3629 - `_ - - Removed ``Transfer-Encoding: chunked`` header from websocket responses to be - compatible with more http proxy servers. `#3798 - `_ - - Accept non-GET request for starting websocket handshake on server side. - `#3980 `_ - - - Bugfixes - -------- - - - Raise a ClientResponseError instead of an AssertionError for a blank - HTTP Reason Phrase. - `#3532 `_ - - Fix an issue where cookies would sometimes not be set during a redirect. - `#3576 `_ - - Change normalize_path_middleware to use 308 redirect instead of 301. - - This behavior should prevent clients from being unable to use PUT/POST - methods on endpoints that are redirected because of a trailing slash. - `#3579 `_ - - Drop the processed task from ``all_tasks()`` list early. It prevents logging about a - task with unhandled exception when the server is used in conjunction with - ``asyncio.run()``. `#3587 `_ - - ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'], - Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``. - `#3595 `_ - - Use sanitized URL as Location header in redirects - `#3614 `_ - - Improve typing annotations for multipart.py along with changes required - by mypy in files that references multipart.py. - `#3621 `_ - - Close session created inside ``aiohttp.request`` when unhandled exception occurs - `#3628 `_ - - Cleanup per-chunk data in generic data read. Memory leak fixed. - `#3631 `_ - - Use correct type for add_view and family - `#3633 `_ - - Fix _keepalive field in __slots__ of ``RequestHandler``. - `#3644 `_ - - Properly handle ConnectionResetError, to silence the "Cannot write to closing - transport" exception when clients disconnect uncleanly. - `#3648 `_ - - Suppress pytest warnings due to ``test_utils`` classes - `#3660 `_ - - Fix overshadowing of overlapped sub-application prefixes. - `#3701 `_ - - Fixed return type annotation for WSMessage.json() - `#3720 `_ - - Properly expose TooManyRedirects publicly as documented. 
- `#3818 `_ - - Fix missing brackets for IPv6 in proxy CONNECT request - `#3841 `_ - - Make the signature of ``aiohttp.test_utils.TestClient.request`` match - ``asyncio.ClientSession.request`` according to the docs `#3852 - `_ - - Use correct style for re-exported imports, makes mypy ``--strict`` mode happy. - `#3868 `_ - - Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of - View `#3880 `_ - - Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is - missing. `#3906 `_ - - Add URL to the string representation of ClientResponseError. - `#3959 `_ - - Accept ``istr`` keys in ``LooseHeaders`` type hints. - `#3976 `_ - - Fixed race conditions in _resolve_host caching and throttling when tracing is enabled. - `#4013 `_ - - For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of - "localhost:None". `#4039 `_ - - - Improved Documentation - ---------------------- - - - Modify documentation for Background Tasks to remove deprecated usage of event loop. - `#3526 `_ - - use ``if __name__ == '__main__':`` in server examples. - `#3775 `_ - - Update documentation reference to the default access logger. - `#3783 `_ - - Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``. - `#3791 `_ - - Removed deprecation warning in tracing example docs - `#3964 `_ - - - ---- - - - 3.5.4 (2019-01-12) - ================== - - Bugfixes - -------- - - - Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a - partial content only in case of compressed content - `#3525 `_ - - - 3.5.3 (2019-01-10) - ================== - - Bugfixes - -------- - - - Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of - ``access_log=True`` and the event loop being in debug mode. `#3504 - `_ - - Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields - `#3511 `_ - - Send custom per-request cookies even if session jar is empty - `#3515 `_ - - Restore Linux binary wheels publishing on PyPI - - ---- - - - 3.5.2 (2019-01-08) - ================== - - Features - -------- - - - ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work - with files asynchronously. I/O based payloads from ``payload.py`` uses a - ``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313 - `_ - - Internal Server Errors in plain text if the browser does not support HTML. - `#3483 `_ - - - Bugfixes - -------- - - - Preserve MultipartWriter parts headers on write. Refactor the way how - ``Payload.headers`` are handled. Payload instances now always have headers and - Content-Type defined. Fix Payload Content-Disposition header reset after initial - creation. `#3035 `_ - - Log suppressed exceptions in ``GunicornWebWorker``. - `#3464 `_ - - Remove wildcard imports. - `#3468 `_ - - Use the same task for app initialization and web server handling in gunicorn workers. - It allows to use Python3.7 context vars smoothly. - `#3471 `_ - - Fix handling of chunked+gzipped response when first chunk does not give uncompressed - data `#3477 `_ - - Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to - avoid a deprecation warning. `#3480 - `_ - - ``Payload.size`` type annotation changed from ``Optional[float]`` to - ``Optional[int]``. `#3484 `_ - - Ignore done tasks when cancels pending activities on ``web.run_app`` finalization. 
- `#3497 `_ - - - Improved Documentation - ---------------------- - - - Add documentation for ``aiohttp.web.HTTPException``. - `#3490 `_ - - - Misc - ---- - - - `#3487 `_ - - - ---- - - - 3.5.1 (2018-12-24) - ==================== - - - Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug - mode. - - 3.5.0 (2018-12-22) - ==================== - - Features - -------- - - - The library type annotations are checked in strict mode now. - - Add support for setting cookies for individual request (`#2387 - `_) - - Application.add_domain implementation (`#2809 - `_) - - The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can - now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174 - `_) - - Make ``request.url`` accessible when transport is closed. (`#3177 - `_) - - Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression - to run in a background executor to avoid blocking the main thread and potentially - triggering health check failures. (`#3205 - `_) - - Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213 - `_) - - Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc`` - file doesn't exist. (`#3267 `_) - - Add default logging handler to web.run_app If the ``Application.debug``` flag is set - and the default logger ``aiohttp.access`` is used, access logs will now be output - using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the - default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324 - `_) - - Add method argument to ``session.ws_connect()``. Sometimes server API requires a - different HTTP method for WebSocket connection establishment. For example, ``Docker - exec`` needs POST. (`#3378 `_) - - Create a task per request handling. (`#3406 - `_) - - - Bugfixes - -------- - - - Enable passing ``access_log_class`` via ``handler_args`` (`#3158 - `_) - - Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186 - `_) - - Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response`` - constructor. (`#3207 `_) - - Don't uppercase HTTP method in parser (`#3233 - `_) - - Make method match regexp RFC-7230 compliant (`#3235 - `_) - - Add ``app.pre_frozen`` state to properly handle startup signals in - sub-applications. (`#3237 `_) - - Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239 - `_) - - Change imports from collections module in preparation for 3.8. (`#3258 - `_) - - Ensure Host header is added first to ClientRequest to better replicate browser (`#3265 - `_) - - Fix forward compatibility with Python 3.8: importing ABCs directly from the - collections module will not be supported anymore. (`#3273 - `_) - - Keep the query string by ``normalize_path_middleware``. (`#3278 - `_) - - Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290 - `_) - - Bracket IPv6 addresses in the HOST header (`#3304 - `_) - - Fix default message for server ping and pong frames. (`#3308 - `_) - - Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop - def. (`#3337 `_) - - Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function - (`#3361 `_) - - Release HTTP response before raising status exception (`#3364 - `_) - - Fix task cancellation when ``sendfile()`` syscall is used by static file - handling. 
(`#3383 `_) - - Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught - in the handler. (`#3414 `_) - - - Improved Documentation - ---------------------- - - - Improve documentation of ``Application.make_handler`` parameters. (`#3152 - `_) - - Fix BaseRequest.raw_headers doc. (`#3215 - `_) - - Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229 - `_) - - Make server access log format placeholder %b documentation reflect - behavior and docstring. (`#3307 `_) - - - Deprecations and Removals - ------------------------- - - - Deprecate modification of ``session.requote_redirect_url`` (`#2278 - `_) - - Deprecate ``stream.unread_data()`` (`#3260 - `_) - - Deprecated use of boolean in ``resp.enable_compression()`` (`#3318 - `_) - - Encourage creation of aiohttp public objects inside a coroutine (`#3331 - `_) - - Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken - for more than 2 years. (`#3358 `_) - - Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop`` - properties. (`#3374 `_) - - Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381 - `_) - - Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385 - `_) - - Deprecate bare connector close, use ``async with connector:`` and ``await - connector.close()`` instead. (`#3417 - `_) - - Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession`` - constructor. (`#3438 `_) - - - Misc - ---- - - - #3341, #3351 -Platform: UNKNOWN -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Framework :: AsyncIO -Requires-Python: >=3.6 -Provides-Extra: speedups diff --git a/third_party/python/aiohttp/README.rst b/third_party/python/aiohttp/README.rst deleted file mode 100644 index 338adbcae240..000000000000 --- a/third_party/python/aiohttp/README.rst +++ /dev/null @@ -1,204 +0,0 @@ -================================== -Async http client/server framework -================================== - -.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png - :height: 64px - :width: 64px - :alt: aiohttp logo - -| - -.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg - :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI - :alt: GitHub Actions status for master branch - -.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/aiohttp - :alt: codecov.io status for master branch - -.. image:: https://badge.fury.io/py/aiohttp.svg - :target: https://pypi.org/project/aiohttp - :alt: Latest PyPI package version - -.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest - :target: https://docs.aiohttp.org/ - :alt: Latest Read The Docs - -.. 
image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group - :target: https://aio-libs.discourse.group - :alt: Discourse status - -.. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - - -Key Features -============ - -- Supports both client and server side of HTTP protocol. -- Supports both client and server Web-Sockets out-of-the-box and avoids - Callback Hell. -- Provides Web-server with middlewares and plugable routing. - - -Getting started -=============== - -Client ------- - -To get something from the web: - -.. code-block:: python - - import aiohttp - import asyncio - - async def main(): - - async with aiohttp.ClientSession() as session: - async with session.get('http://python.org') as response: - - print("Status:", response.status) - print("Content-type:", response.headers['content-type']) - - html = await response.text() - print("Body:", html[:15], "...") - - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) - -This prints: - -.. code-block:: - - Status: 200 - Content-type: text/html; charset=utf-8 - Body: ... - -Coming from `requests `_ ? Read `why we need so many lines `_. - -Server ------- - -An example using a simple server: - -.. code-block:: python - - # examples/server_simple.py - from aiohttp import web - - async def handle(request): - name = request.match_info.get('name', "Anonymous") - text = "Hello, " + name - return web.Response(text=text) - - async def wshandle(request): - ws = web.WebSocketResponse() - await ws.prepare(request) - - async for msg in ws: - if msg.type == web.WSMsgType.text: - await ws.send_str("Hello, {}".format(msg.data)) - elif msg.type == web.WSMsgType.binary: - await ws.send_bytes(msg.data) - elif msg.type == web.WSMsgType.close: - break - - return ws - - - app = web.Application() - app.add_routes([web.get('/', handle), - web.get('/echo', wshandle), - web.get('/{name}', handle)]) - - if __name__ == '__main__': - web.run_app(app) - - -Documentation -============= - -https://aiohttp.readthedocs.io/ - - -Demos -===== - -https://github.com/aio-libs/aiohttp-demos - - -External links -============== - -* `Third party libraries - `_ -* `Built with aiohttp - `_ -* `Powered by aiohttp - `_ - -Feel free to make a Pull Request for adding your link to these pages! - - -Communication channels -====================== - -*aio-libs discourse group*: https://aio-libs.discourse.group - -*gitter chat* https://gitter.im/aio-libs/Lobby - -We support `Stack Overflow -`_. -Please add *aiohttp* tag to your question there. - -Requirements -============ - -- Python >= 3.6 -- async-timeout_ -- attrs_ -- chardet_ -- multidict_ -- yarl_ - -Optionally you may install the cChardet_ and aiodns_ libraries (highly -recommended for sake of speed). - -.. _chardet: https://pypi.python.org/pypi/chardet -.. _aiodns: https://pypi.python.org/pypi/aiodns -.. _attrs: https://github.com/python-attrs/attrs -.. _multidict: https://pypi.python.org/pypi/multidict -.. _yarl: https://pypi.python.org/pypi/yarl -.. _async-timeout: https://pypi.python.org/pypi/async_timeout -.. _cChardet: https://pypi.python.org/pypi/cchardet - -License -======= - -``aiohttp`` is offered under the Apache 2 license. - - -Keepsafe -======== - -The aiohttp community would like to thank Keepsafe -(https://www.getkeepsafe.com) for its support in the early days of -the project. 
- - -Source code -=========== - -The latest developer version is available in a GitHub repository: -https://github.com/aio-libs/aiohttp - -Benchmarks -========== - -If you are interested in efficiency, the AsyncIO community maintains a -list of benchmarks on the official wiki: -https://github.com/python/asyncio/wiki/Benchmarks diff --git a/third_party/python/aiohttp/aiohttp/.hash/_cparser.pxd.hash b/third_party/python/aiohttp/aiohttp/.hash/_cparser.pxd.hash deleted file mode 100644 index 7f4060a33f44..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_cparser.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -b60c37d122fa91049ccf318c94c871d82ba17ff3bc3fc64f8a65426fce7120b7 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd diff --git a/third_party/python/aiohttp/aiohttp/.hash/_find_header.pxd.hash b/third_party/python/aiohttp/aiohttp/.hash/_find_header.pxd.hash deleted file mode 100644 index f006c2de5d24..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_find_header.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd diff --git a/third_party/python/aiohttp/aiohttp/.hash/_frozenlist.pyx.hash b/third_party/python/aiohttp/aiohttp/.hash/_frozenlist.pyx.hash deleted file mode 100644 index ccad753d9a15..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_frozenlist.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -043f0b704444c6c59da38ab3bae43ce1ff8bfe91d5ce45103b494400e7b71688 /home/runner/work/aiohttp/aiohttp/aiohttp/_frozenlist.pyx diff --git a/third_party/python/aiohttp/aiohttp/.hash/_helpers.pyi.hash b/third_party/python/aiohttp/aiohttp/.hash/_helpers.pyi.hash deleted file mode 100644 index 6a30d6325b65..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_helpers.pyi.hash +++ /dev/null @@ -1 +0,0 @@ -6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi diff --git a/third_party/python/aiohttp/aiohttp/.hash/_helpers.pyx.hash b/third_party/python/aiohttp/aiohttp/.hash/_helpers.pyx.hash deleted file mode 100644 index 8f38727d780d..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_helpers.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx diff --git a/third_party/python/aiohttp/aiohttp/.hash/_http_parser.pyx.hash b/third_party/python/aiohttp/aiohttp/.hash/_http_parser.pyx.hash deleted file mode 100644 index ea0ea796ec3d..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_http_parser.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -f0688fb2e81ea92bf0a17822260d9591a30979101da12a4b873113fc459fb5fa /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/third_party/python/aiohttp/aiohttp/.hash/_http_writer.pyx.hash b/third_party/python/aiohttp/aiohttp/.hash/_http_writer.pyx.hash deleted file mode 100644 index b325e7dfbf90..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_http_writer.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -4e7b7f7baa5c65954e85a5b7c8db7786a0ec3498081b0a9420f792a803086281 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx diff --git a/third_party/python/aiohttp/aiohttp/.hash/_websocket.pyx.hash b/third_party/python/aiohttp/aiohttp/.hash/_websocket.pyx.hash deleted file mode 100644 index ddbb4c7a6f87..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/_websocket.pyx.hash +++ /dev/null @@ -1 +0,0 @@ 
-d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx diff --git a/third_party/python/aiohttp/aiohttp/.hash/frozenlist.pyi.hash b/third_party/python/aiohttp/aiohttp/.hash/frozenlist.pyi.hash deleted file mode 100644 index e461073ac4f6..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/frozenlist.pyi.hash +++ /dev/null @@ -1 +0,0 @@ -6d134aa08da3d6ba0f76d81fc7f9ec7836a7bc1a99b1950d1c3aa65ed7e3951a /home/runner/work/aiohttp/aiohttp/aiohttp/frozenlist.pyi diff --git a/third_party/python/aiohttp/aiohttp/.hash/hdrs.py.hash b/third_party/python/aiohttp/aiohttp/.hash/hdrs.py.hash deleted file mode 100644 index 0e3477744244..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/hdrs.py.hash +++ /dev/null @@ -1 +0,0 @@ -5ac8c3258003604c8993bfa8357361036337330b722e4849024972ccbb5c95f5 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py diff --git a/third_party/python/aiohttp/aiohttp/.hash/signals.pyi.hash b/third_party/python/aiohttp/aiohttp/.hash/signals.pyi.hash deleted file mode 100644 index 29acd69f02e3..000000000000 --- a/third_party/python/aiohttp/aiohttp/.hash/signals.pyi.hash +++ /dev/null @@ -1 +0,0 @@ -48b4df50f771d7e8385524ea0a7057ca1482974f8a43e674982b04b08bc17d5e /home/runner/work/aiohttp/aiohttp/aiohttp/signals.pyi diff --git a/third_party/python/aiohttp/aiohttp/__init__.py b/third_party/python/aiohttp/aiohttp/__init__.py deleted file mode 100644 index 12c73f4a3211..000000000000 --- a/third_party/python/aiohttp/aiohttp/__init__.py +++ /dev/null @@ -1,217 +0,0 @@ -__version__ = "3.7.4.post0" - -from typing import Tuple - -from . import hdrs as hdrs -from .client import ( - BaseConnector as BaseConnector, - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - ClientResponseError as ClientResponseError, - ClientSession as ClientSession, - ClientSSLError as ClientSSLError, - ClientTimeout as ClientTimeout, - ClientWebSocketResponse as ClientWebSocketResponse, - ContentTypeError as ContentTypeError, - Fingerprint as Fingerprint, - InvalidURL as InvalidURL, - NamedPipeConnector as NamedPipeConnector, - RequestInfo as RequestInfo, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TCPConnector as TCPConnector, - TooManyRedirects as TooManyRedirects, - UnixConnector as UnixConnector, - WSServerHandshakeError as WSServerHandshakeError, - request as request, -) -from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar -from .formdata import FormData as FormData -from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy -from .http import ( - HttpVersion as HttpVersion, - HttpVersion10 as HttpVersion10, - HttpVersion11 as HttpVersion11, - WebSocketError as WebSocketError, - WSCloseCode as WSCloseCode, - WSMessage as WSMessage, - WSMsgType as WSMsgType, -) -from .multipart import ( - BadContentDispositionHeader as BadContentDispositionHeader, - BadContentDispositionParam as 
BadContentDispositionParam, - BodyPartReader as BodyPartReader, - MultipartReader as MultipartReader, - MultipartWriter as MultipartWriter, - content_disposition_filename as content_disposition_filename, - parse_content_disposition as parse_content_disposition, -) -from .payload import ( - PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, - AsyncIterablePayload as AsyncIterablePayload, - BufferedReaderPayload as BufferedReaderPayload, - BytesIOPayload as BytesIOPayload, - BytesPayload as BytesPayload, - IOBasePayload as IOBasePayload, - JsonPayload as JsonPayload, - Payload as Payload, - StringIOPayload as StringIOPayload, - StringPayload as StringPayload, - TextIOPayload as TextIOPayload, - get_payload as get_payload, - payload_type as payload_type, -) -from .payload_streamer import streamer as streamer -from .resolver import ( - AsyncResolver as AsyncResolver, - DefaultResolver as DefaultResolver, - ThreadedResolver as ThreadedResolver, -) -from .signals import Signal as Signal -from .streams import ( - EMPTY_PAYLOAD as EMPTY_PAYLOAD, - DataQueue as DataQueue, - EofStream as EofStream, - FlowControlDataQueue as FlowControlDataQueue, - StreamReader as StreamReader, -) -from .tracing import ( - TraceConfig as TraceConfig, - TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, - TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, - TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, - TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, - TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, - TraceDnsCacheHitParams as TraceDnsCacheHitParams, - TraceDnsCacheMissParams as TraceDnsCacheMissParams, - TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, - TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, - TraceRequestChunkSentParams as TraceRequestChunkSentParams, - TraceRequestEndParams as TraceRequestEndParams, - TraceRequestExceptionParams as TraceRequestExceptionParams, - TraceRequestRedirectParams as TraceRequestRedirectParams, - TraceRequestStartParams as TraceRequestStartParams, - TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, -) - -__all__: Tuple[str, ...] 
= ( - "hdrs", - # client - "BaseConnector", - "ClientConnectionError", - "ClientConnectorCertificateError", - "ClientConnectorError", - "ClientConnectorSSLError", - "ClientError", - "ClientHttpProxyError", - "ClientOSError", - "ClientPayloadError", - "ClientProxyConnectionError", - "ClientResponse", - "ClientRequest", - "ClientResponseError", - "ClientSSLError", - "ClientSession", - "ClientTimeout", - "ClientWebSocketResponse", - "ContentTypeError", - "Fingerprint", - "InvalidURL", - "RequestInfo", - "ServerConnectionError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ServerTimeoutError", - "TCPConnector", - "TooManyRedirects", - "UnixConnector", - "NamedPipeConnector", - "WSServerHandshakeError", - "request", - # cookiejar - "CookieJar", - "DummyCookieJar", - # formdata - "FormData", - # helpers - "BasicAuth", - "ChainMapProxy", - # http - "HttpVersion", - "HttpVersion10", - "HttpVersion11", - "WSMsgType", - "WSCloseCode", - "WSMessage", - "WebSocketError", - # multipart - "BadContentDispositionHeader", - "BadContentDispositionParam", - "BodyPartReader", - "MultipartReader", - "MultipartWriter", - "content_disposition_filename", - "parse_content_disposition", - # payload - "AsyncIterablePayload", - "BufferedReaderPayload", - "BytesIOPayload", - "BytesPayload", - "IOBasePayload", - "JsonPayload", - "PAYLOAD_REGISTRY", - "Payload", - "StringIOPayload", - "StringPayload", - "TextIOPayload", - "get_payload", - "payload_type", - # payload_streamer - "streamer", - # resolver - "AsyncResolver", - "DefaultResolver", - "ThreadedResolver", - # signals - "Signal", - "DataQueue", - "EMPTY_PAYLOAD", - "EofStream", - "FlowControlDataQueue", - "StreamReader", - # tracing - "TraceConfig", - "TraceConnectionCreateEndParams", - "TraceConnectionCreateStartParams", - "TraceConnectionQueuedEndParams", - "TraceConnectionQueuedStartParams", - "TraceConnectionReuseconnParams", - "TraceDnsCacheHitParams", - "TraceDnsCacheMissParams", - "TraceDnsResolveHostEndParams", - "TraceDnsResolveHostStartParams", - "TraceRequestChunkSentParams", - "TraceRequestEndParams", - "TraceRequestExceptionParams", - "TraceRequestRedirectParams", - "TraceRequestStartParams", - "TraceResponseChunkReceivedParams", -) - -try: - from .worker import GunicornUVLoopWebWorker, GunicornWebWorker - - __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker") -except ImportError: # pragma: no cover - pass diff --git a/third_party/python/aiohttp/aiohttp/_cparser.pxd b/third_party/python/aiohttp/aiohttp/_cparser.pxd deleted file mode 100644 index 0f9fc0092361..000000000000 --- a/third_party/python/aiohttp/aiohttp/_cparser.pxd +++ /dev/null @@ -1,140 +0,0 @@ -from libc.stdint cimport uint16_t, uint32_t, uint64_t - - -cdef extern from "../vendor/http-parser/http_parser.h": - ctypedef int (*http_data_cb) (http_parser*, - const char *at, - size_t length) except -1 - - ctypedef int (*http_cb) (http_parser*) except -1 - - struct http_parser: - unsigned int type - unsigned int flags - unsigned int state - unsigned int header_state - unsigned int index - - uint32_t nread - uint64_t content_length - - unsigned short http_major - unsigned short http_minor - unsigned int status_code - unsigned int method - unsigned int http_errno - - unsigned int upgrade - - void *data - - struct http_parser_settings: - http_cb on_message_begin - http_data_cb on_url - http_data_cb on_status - http_data_cb on_header_field - http_data_cb on_header_value - http_cb on_headers_complete - http_data_cb on_body - http_cb on_message_complete - http_cb 
on_chunk_header - http_cb on_chunk_complete - - enum http_parser_type: - HTTP_REQUEST, - HTTP_RESPONSE, - HTTP_BOTH - - enum http_errno: - HPE_OK, - HPE_CB_message_begin, - HPE_CB_url, - HPE_CB_header_field, - HPE_CB_header_value, - HPE_CB_headers_complete, - HPE_CB_body, - HPE_CB_message_complete, - HPE_CB_status, - HPE_CB_chunk_header, - HPE_CB_chunk_complete, - HPE_INVALID_EOF_STATE, - HPE_HEADER_OVERFLOW, - HPE_CLOSED_CONNECTION, - HPE_INVALID_VERSION, - HPE_INVALID_STATUS, - HPE_INVALID_METHOD, - HPE_INVALID_URL, - HPE_INVALID_HOST, - HPE_INVALID_PORT, - HPE_INVALID_PATH, - HPE_INVALID_QUERY_STRING, - HPE_INVALID_FRAGMENT, - HPE_LF_EXPECTED, - HPE_INVALID_HEADER_TOKEN, - HPE_INVALID_CONTENT_LENGTH, - HPE_INVALID_CHUNK_SIZE, - HPE_INVALID_CONSTANT, - HPE_INVALID_INTERNAL_STATE, - HPE_STRICT, - HPE_PAUSED, - HPE_UNKNOWN - - enum flags: - F_CHUNKED, - F_CONNECTION_KEEP_ALIVE, - F_CONNECTION_CLOSE, - F_CONNECTION_UPGRADE, - F_TRAILING, - F_UPGRADE, - F_SKIPBODY, - F_CONTENTLENGTH - - enum http_method: - DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY, - LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND, - REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE, - MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR, - LINK, UNLINK - - void http_parser_init(http_parser *parser, http_parser_type type) - - size_t http_parser_execute(http_parser *parser, - const http_parser_settings *settings, - const char *data, - size_t len) - - int http_should_keep_alive(const http_parser *parser) - - void http_parser_settings_init(http_parser_settings *settings) - - const char *http_errno_name(http_errno err) - const char *http_errno_description(http_errno err) - const char *http_method_str(http_method m) - - # URL Parser - - enum http_parser_url_fields: - UF_SCHEMA = 0, - UF_HOST = 1, - UF_PORT = 2, - UF_PATH = 3, - UF_QUERY = 4, - UF_FRAGMENT = 5, - UF_USERINFO = 6, - UF_MAX = 7 - - struct http_parser_url_field_data: - uint16_t off - uint16_t len - - struct http_parser_url: - uint16_t field_set - uint16_t port - http_parser_url_field_data[UF_MAX] field_data - - void http_parser_url_init(http_parser_url *u) - - int http_parser_parse_url(const char *buf, - size_t buflen, - int is_connect, - http_parser_url *u) diff --git a/third_party/python/aiohttp/aiohttp/_find_header.c b/third_party/python/aiohttp/aiohttp/_find_header.c deleted file mode 100644 index 012cba33ac13..000000000000 --- a/third_party/python/aiohttp/aiohttp/_find_header.c +++ /dev/null @@ -1,9870 +0,0 @@ -/* The file is autogenerated from aiohttp/hdrs.py -Run ./tools/gen.py to update it after the origin changing. 
*/ - -#include "_find_header.h" - -#define NEXT_CHAR() \ -{ \ - count++; \ - if (count == size) { \ - /* end of search */ \ - return -1; \ - } \ - pchar++; \ - ch = *pchar; \ - last = (count == size -1); \ -} while(0); - -int -find_header(const char *str, int size) -{ - char *pchar = str; - int last; - char ch; - int count = -1; - pchar--; - - - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto A; - case 'a': - if (last) { - return -1; - } - goto A; - case 'C': - if (last) { - return -1; - } - goto C; - case 'c': - if (last) { - return -1; - } - goto C; - case 'D': - if (last) { - return -1; - } - goto D; - case 'd': - if (last) { - return -1; - } - goto D; - case 'E': - if (last) { - return -1; - } - goto E; - case 'e': - if (last) { - return -1; - } - goto E; - case 'F': - if (last) { - return -1; - } - goto F; - case 'f': - if (last) { - return -1; - } - goto F; - case 'H': - if (last) { - return -1; - } - goto H; - case 'h': - if (last) { - return -1; - } - goto H; - case 'I': - if (last) { - return -1; - } - goto I; - case 'i': - if (last) { - return -1; - } - goto I; - case 'K': - if (last) { - return -1; - } - goto K; - case 'k': - if (last) { - return -1; - } - goto K; - case 'L': - if (last) { - return -1; - } - goto L; - case 'l': - if (last) { - return -1; - } - goto L; - case 'M': - if (last) { - return -1; - } - goto M; - case 'm': - if (last) { - return -1; - } - goto M; - case 'O': - if (last) { - return -1; - } - goto O; - case 'o': - if (last) { - return -1; - } - goto O; - case 'P': - if (last) { - return -1; - } - goto P; - case 'p': - if (last) { - return -1; - } - goto P; - case 'R': - if (last) { - return -1; - } - goto R; - case 'r': - if (last) { - return -1; - } - goto R; - case 'S': - if (last) { - return -1; - } - goto S; - case 's': - if (last) { - return -1; - } - goto S; - case 'T': - if (last) { - return -1; - } - goto T; - case 't': - if (last) { - return -1; - } - goto T; - case 'U': - if (last) { - return -1; - } - goto U; - case 'u': - if (last) { - return -1; - } - goto U; - case 'V': - if (last) { - return -1; - } - goto V; - case 'v': - if (last) { - return -1; - } - goto V; - case 'W': - if (last) { - return -1; - } - goto W; - case 'w': - if (last) { - return -1; - } - goto W; - case 'X': - if (last) { - return -1; - } - goto X; - case 'x': - if (last) { - return -1; - } - goto X; - default: - return -1; - } - -A: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto AC; - case 'c': - if (last) { - return -1; - } - goto AC; - case 'G': - if (last) { - return -1; - } - goto AG; - case 'g': - if (last) { - return -1; - } - goto AG; - case 'L': - if (last) { - return -1; - } - goto AL; - case 'l': - if (last) { - return -1; - } - goto AL; - case 'U': - if (last) { - return -1; - } - goto AU; - case 'u': - if (last) { - return -1; - } - goto AU; - default: - return -1; - } - -AC: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto ACC; - case 'c': - if (last) { - return -1; - } - goto ACC; - default: - return -1; - } - -ACC: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCE; - case 'e': - if (last) { - return -1; - } - goto ACCE; - default: - return -1; - } - -ACCE: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto ACCEP; - case 'p': - if (last) { - return -1; - } - goto ACCEP; - case 'S': - if (last) { - return -1; - } - goto ACCES; - case 's': - if (last) { - return -1; - } - goto ACCES; - default: - 
return -1; - } - -ACCEP: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 0; - } - goto ACCEPT; - case 't': - if (last) { - return 0; - } - goto ACCEPT; - default: - return -1; - } - -ACCEPT: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCEPT_; - default: - return -1; - } - -ACCEPT_: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto ACCEPT_C; - case 'c': - if (last) { - return -1; - } - goto ACCEPT_C; - case 'E': - if (last) { - return -1; - } - goto ACCEPT_E; - case 'e': - if (last) { - return -1; - } - goto ACCEPT_E; - case 'L': - if (last) { - return -1; - } - goto ACCEPT_L; - case 'l': - if (last) { - return -1; - } - goto ACCEPT_L; - case 'R': - if (last) { - return -1; - } - goto ACCEPT_R; - case 'r': - if (last) { - return -1; - } - goto ACCEPT_R; - default: - return -1; - } - -ACCEPT_C: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto ACCEPT_CH; - case 'h': - if (last) { - return -1; - } - goto ACCEPT_CH; - default: - return -1; - } - -ACCEPT_CH: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCEPT_CHA; - case 'a': - if (last) { - return -1; - } - goto ACCEPT_CHA; - default: - return -1; - } - -ACCEPT_CHA: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto ACCEPT_CHAR; - case 'r': - if (last) { - return -1; - } - goto ACCEPT_CHAR; - default: - return -1; - } - -ACCEPT_CHAR: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto ACCEPT_CHARS; - case 's': - if (last) { - return -1; - } - goto ACCEPT_CHARS; - default: - return -1; - } - -ACCEPT_CHARS: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCEPT_CHARSE; - case 'e': - if (last) { - return -1; - } - goto ACCEPT_CHARSE; - default: - return -1; - } - -ACCEPT_CHARSE: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 1; - } - goto ACCEPT_CHARSET; - case 't': - if (last) { - return 1; - } - goto ACCEPT_CHARSET; - default: - return -1; - } - -ACCEPT_E: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto ACCEPT_EN; - case 'n': - if (last) { - return -1; - } - goto ACCEPT_EN; - default: - return -1; - } - -ACCEPT_EN: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto ACCEPT_ENC; - case 'c': - if (last) { - return -1; - } - goto ACCEPT_ENC; - default: - return -1; - } - -ACCEPT_ENC: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCEPT_ENCO; - case 'o': - if (last) { - return -1; - } - goto ACCEPT_ENCO; - default: - return -1; - } - -ACCEPT_ENCO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto ACCEPT_ENCOD; - case 'd': - if (last) { - return -1; - } - goto ACCEPT_ENCOD; - default: - return -1; - } - -ACCEPT_ENCOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto ACCEPT_ENCODI; - case 'i': - if (last) { - return -1; - } - goto ACCEPT_ENCODI; - default: - return -1; - } - -ACCEPT_ENCODI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto ACCEPT_ENCODIN; - case 'n': - if (last) { - return -1; - } - goto ACCEPT_ENCODIN; - default: - return -1; - } - -ACCEPT_ENCODIN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return 2; - } - goto ACCEPT_ENCODING; - case 'g': - if (last) { - return 2; - } - goto ACCEPT_ENCODING; - default: - return -1; - } - -ACCEPT_L: - NEXT_CHAR(); - switch (ch) { - case 
'A': - if (last) { - return -1; - } - goto ACCEPT_LA; - case 'a': - if (last) { - return -1; - } - goto ACCEPT_LA; - default: - return -1; - } - -ACCEPT_LA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto ACCEPT_LAN; - case 'n': - if (last) { - return -1; - } - goto ACCEPT_LAN; - default: - return -1; - } - -ACCEPT_LAN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto ACCEPT_LANG; - case 'g': - if (last) { - return -1; - } - goto ACCEPT_LANG; - default: - return -1; - } - -ACCEPT_LANG: - NEXT_CHAR(); - switch (ch) { - case 'U': - if (last) { - return -1; - } - goto ACCEPT_LANGU; - case 'u': - if (last) { - return -1; - } - goto ACCEPT_LANGU; - default: - return -1; - } - -ACCEPT_LANGU: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCEPT_LANGUA; - case 'a': - if (last) { - return -1; - } - goto ACCEPT_LANGUA; - default: - return -1; - } - -ACCEPT_LANGUA: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto ACCEPT_LANGUAG; - case 'g': - if (last) { - return -1; - } - goto ACCEPT_LANGUAG; - default: - return -1; - } - -ACCEPT_LANGUAG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 3; - } - goto ACCEPT_LANGUAGE; - case 'e': - if (last) { - return 3; - } - goto ACCEPT_LANGUAGE; - default: - return -1; - } - -ACCEPT_R: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCEPT_RA; - case 'a': - if (last) { - return -1; - } - goto ACCEPT_RA; - default: - return -1; - } - -ACCEPT_RA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto ACCEPT_RAN; - case 'n': - if (last) { - return -1; - } - goto ACCEPT_RAN; - default: - return -1; - } - -ACCEPT_RAN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto ACCEPT_RANG; - case 'g': - if (last) { - return -1; - } - goto ACCEPT_RANG; - default: - return -1; - } - -ACCEPT_RANG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCEPT_RANGE; - case 'e': - if (last) { - return -1; - } - goto ACCEPT_RANGE; - default: - return -1; - } - -ACCEPT_RANGE: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 4; - } - goto ACCEPT_RANGES; - case 's': - if (last) { - return 4; - } - goto ACCEPT_RANGES; - default: - return -1; - } - -ACCES: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto ACCESS; - case 's': - if (last) { - return -1; - } - goto ACCESS; - default: - return -1; - } - -ACCESS: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCESS_; - default: - return -1; - } - -ACCESS_: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto ACCESS_C; - case 'c': - if (last) { - return -1; - } - goto ACCESS_C; - default: - return -1; - } - -ACCESS_C: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCESS_CO; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CO; - default: - return -1; - } - -ACCESS_CO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto ACCESS_CON; - case 'n': - if (last) { - return -1; - } - goto ACCESS_CON; - default: - return -1; - } - -ACCESS_CON: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto ACCESS_CONT; - case 't': - if (last) { - return -1; - } - goto ACCESS_CONT; - default: - return -1; - } - -ACCESS_CONT: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - 
goto ACCESS_CONTR; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTR; - default: - return -1; - } - -ACCESS_CONTR: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCESS_CONTRO; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CONTRO; - default: - return -1; - } - -ACCESS_CONTRO: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto ACCESS_CONTROL; - case 'l': - if (last) { - return -1; - } - goto ACCESS_CONTROL; - default: - return -1; - } - -ACCESS_CONTROL: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCESS_CONTROL_; - default: - return -1; - } - -ACCESS_CONTROL_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_A; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_A; - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_E; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_E; - case 'M': - if (last) { - return -1; - } - goto ACCESS_CONTROL_M; - case 'm': - if (last) { - return -1; - } - goto ACCESS_CONTROL_M; - case 'R': - if (last) { - return -1; - } - goto ACCESS_CONTROL_R; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTROL_R; - default: - return -1; - } - -ACCESS_CONTROL_A: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto ACCESS_CONTROL_AL; - case 'l': - if (last) { - return -1; - } - goto ACCESS_CONTROL_AL; - default: - return -1; - } - -ACCESS_CONTROL_AL: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALL; - case 'l': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALL; - default: - return -1; - } - -ACCESS_CONTROL_ALL: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLO; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLO; - default: - return -1; - } - -ACCESS_CONTROL_ALLO: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW; - case 'w': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_C; - case 'c': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_C; - case 'H': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_H; - case 'h': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_H; - case 'M': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_M; - case 'm': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_M; - case 'O': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_O; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_O; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_C: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CR; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CR; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CR: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CRE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CRE; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CRE: - NEXT_CHAR(); - switch (ch) { - 
case 'D': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CRED; - case 'd': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CRED; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CRED: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDE; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CREDE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDEN; - case 'n': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDEN; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CREDEN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENT; - case 't': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENT; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CREDENT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENTI; - case 'i': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENTI; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CREDENTI: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENTIA; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENTIA; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CREDENTIA: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENTIAL; - case 'l': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_CREDENTIAL; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_CREDENTIAL: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 5; - } - goto ACCESS_CONTROL_ALLOW_CREDENTIALS; - case 's': - if (last) { - return 5; - } - goto ACCESS_CONTROL_ALLOW_CREDENTIALS; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_H: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HE; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_HE: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEA; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEA; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_HEA: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEAD; - case 'd': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEAD; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_HEAD: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEADE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEADE; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_HEADE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEADER; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_HEADER; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_HEADER: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 6; - } - goto ACCESS_CONTROL_ALLOW_HEADERS; - case 's': - if (last) { - return 6; - } - goto ACCESS_CONTROL_ALLOW_HEADERS; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_M: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ME; - case 
'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ME; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_ME: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_MET; - case 't': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_MET; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_MET: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_METH; - case 'h': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_METH; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_METH: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_METHO; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_METHO; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_METHO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_METHOD; - case 'd': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_METHOD; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_METHOD: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 7; - } - goto ACCESS_CONTROL_ALLOW_METHODS; - case 's': - if (last) { - return 7; - } - goto ACCESS_CONTROL_ALLOW_METHODS; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_O: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_OR; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_OR; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_OR: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ORI; - case 'i': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ORI; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_ORI: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ORIG; - case 'g': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ORIG; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_ORIG: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ORIGI; - case 'i': - if (last) { - return -1; - } - goto ACCESS_CONTROL_ALLOW_ORIGI; - default: - return -1; - } - -ACCESS_CONTROL_ALLOW_ORIGI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 8; - } - goto ACCESS_CONTROL_ALLOW_ORIGIN; - case 'n': - if (last) { - return 8; - } - goto ACCESS_CONTROL_ALLOW_ORIGIN; - default: - return -1; - } - -ACCESS_CONTROL_E: - NEXT_CHAR(); - switch (ch) { - case 'X': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EX; - case 'x': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EX; - default: - return -1; - } - -ACCESS_CONTROL_EX: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXP; - case 'p': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXP; - default: - return -1; - } - -ACCESS_CONTROL_EXP: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPO; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPO; - default: - return -1; - } - -ACCESS_CONTROL_EXPO: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOS; - case 's': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOS; - default: - return -1; - } - -ACCESS_CONTROL_EXPOS: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE; - case 
'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_H; - case 'h': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_H; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_H: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HE; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_HE: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEA; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEA; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_HEA: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEAD; - case 'd': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEAD; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_HEAD: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEADE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEADE; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_HEADE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEADER; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTROL_EXPOSE_HEADER; - default: - return -1; - } - -ACCESS_CONTROL_EXPOSE_HEADER: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 9; - } - goto ACCESS_CONTROL_EXPOSE_HEADERS; - case 's': - if (last) { - return 9; - } - goto ACCESS_CONTROL_EXPOSE_HEADERS; - default: - return -1; - } - -ACCESS_CONTROL_M: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MA; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MA; - default: - return -1; - } - -ACCESS_CONTROL_MA: - NEXT_CHAR(); - switch (ch) { - case 'X': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX; - case 'x': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX; - default: - return -1; - } - -ACCESS_CONTROL_MAX: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX_; - default: - return -1; - } - -ACCESS_CONTROL_MAX_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX_A; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX_A; - default: - return -1; - } - -ACCESS_CONTROL_MAX_A: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX_AG; - case 'g': - if (last) { - return -1; - } - goto ACCESS_CONTROL_MAX_AG; - default: - return -1; - } - -ACCESS_CONTROL_MAX_AG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 10; - } - goto ACCESS_CONTROL_MAX_AGE; - case 'e': - if (last) { - return 10; - } - goto ACCESS_CONTROL_MAX_AGE; - default: - return -1; - } - -ACCESS_CONTROL_R: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_RE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_RE; - default: - return -1; - } - -ACCESS_CONTROL_RE: - NEXT_CHAR(); - switch (ch) { - case 'Q': - if (last) { - return 
-1; - } - goto ACCESS_CONTROL_REQ; - case 'q': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQ; - default: - return -1; - } - -ACCESS_CONTROL_REQ: - NEXT_CHAR(); - switch (ch) { - case 'U': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQU; - case 'u': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQU; - default: - return -1; - } - -ACCESS_CONTROL_REQU: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUE; - default: - return -1; - } - -ACCESS_CONTROL_REQUE: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUES; - case 's': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUES; - default: - return -1; - } - -ACCESS_CONTROL_REQUES: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST; - case 't': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_H; - case 'h': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_H; - case 'M': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_M; - case 'm': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_M; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_H: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HE; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_HE: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEA; - case 'a': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEA; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_HEA: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEAD; - case 'd': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEAD; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_HEAD: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEADE; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEADE; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_HEADE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEADER; - case 'r': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_HEADER; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_HEADER: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 11; - } - goto ACCESS_CONTROL_REQUEST_HEADERS; - case 's': - if (last) { - return 11; - } - goto ACCESS_CONTROL_REQUEST_HEADERS; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_M: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_ME; - case 'e': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_ME; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_ME: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_MET; - case 't': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_MET; 
- default: - return -1; - } - -ACCESS_CONTROL_REQUEST_MET: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_METH; - case 'h': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_METH; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_METH: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_METHO; - case 'o': - if (last) { - return -1; - } - goto ACCESS_CONTROL_REQUEST_METHO; - default: - return -1; - } - -ACCESS_CONTROL_REQUEST_METHO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return 12; - } - goto ACCESS_CONTROL_REQUEST_METHOD; - case 'd': - if (last) { - return 12; - } - goto ACCESS_CONTROL_REQUEST_METHOD; - default: - return -1; - } - -AG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 13; - } - goto AGE; - case 'e': - if (last) { - return 13; - } - goto AGE; - default: - return -1; - } - -AL: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto ALL; - case 'l': - if (last) { - return -1; - } - goto ALL; - default: - return -1; - } - -ALL: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto ALLO; - case 'o': - if (last) { - return -1; - } - goto ALLO; - default: - return -1; - } - -ALLO: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return 14; - } - goto ALLOW; - case 'w': - if (last) { - return 14; - } - goto ALLOW; - default: - return -1; - } - -AU: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto AUT; - case 't': - if (last) { - return -1; - } - goto AUT; - default: - return -1; - } - -AUT: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto AUTH; - case 'h': - if (last) { - return -1; - } - goto AUTH; - default: - return -1; - } - -AUTH: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto AUTHO; - case 'o': - if (last) { - return -1; - } - goto AUTHO; - default: - return -1; - } - -AUTHO: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto AUTHOR; - case 'r': - if (last) { - return -1; - } - goto AUTHOR; - default: - return -1; - } - -AUTHOR: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto AUTHORI; - case 'i': - if (last) { - return -1; - } - goto AUTHORI; - default: - return -1; - } - -AUTHORI: - NEXT_CHAR(); - switch (ch) { - case 'Z': - if (last) { - return -1; - } - goto AUTHORIZ; - case 'z': - if (last) { - return -1; - } - goto AUTHORIZ; - default: - return -1; - } - -AUTHORIZ: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto AUTHORIZA; - case 'a': - if (last) { - return -1; - } - goto AUTHORIZA; - default: - return -1; - } - -AUTHORIZA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto AUTHORIZAT; - case 't': - if (last) { - return -1; - } - goto AUTHORIZAT; - default: - return -1; - } - -AUTHORIZAT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto AUTHORIZATI; - case 'i': - if (last) { - return -1; - } - goto AUTHORIZATI; - default: - return -1; - } - -AUTHORIZATI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto AUTHORIZATIO; - case 'o': - if (last) { - return -1; - } - goto AUTHORIZATIO; - default: - return -1; - } - -AUTHORIZATIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 15; - } - goto AUTHORIZATION; - case 'n': - if (last) { - return 15; - } - goto 
AUTHORIZATION; - default: - return -1; - } - -C: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto CA; - case 'a': - if (last) { - return -1; - } - goto CA; - case 'O': - if (last) { - return -1; - } - goto CO; - case 'o': - if (last) { - return -1; - } - goto CO; - default: - return -1; - } - -CA: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto CAC; - case 'c': - if (last) { - return -1; - } - goto CAC; - default: - return -1; - } - -CAC: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto CACH; - case 'h': - if (last) { - return -1; - } - goto CACH; - default: - return -1; - } - -CACH: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto CACHE; - case 'e': - if (last) { - return -1; - } - goto CACHE; - default: - return -1; - } - -CACHE: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto CACHE_; - default: - return -1; - } - -CACHE_: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto CACHE_C; - case 'c': - if (last) { - return -1; - } - goto CACHE_C; - default: - return -1; - } - -CACHE_C: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CACHE_CO; - case 'o': - if (last) { - return -1; - } - goto CACHE_CO; - default: - return -1; - } - -CACHE_CO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CACHE_CON; - case 'n': - if (last) { - return -1; - } - goto CACHE_CON; - default: - return -1; - } - -CACHE_CON: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto CACHE_CONT; - case 't': - if (last) { - return -1; - } - goto CACHE_CONT; - default: - return -1; - } - -CACHE_CONT: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto CACHE_CONTR; - case 'r': - if (last) { - return -1; - } - goto CACHE_CONTR; - default: - return -1; - } - -CACHE_CONTR: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CACHE_CONTRO; - case 'o': - if (last) { - return -1; - } - goto CACHE_CONTRO; - default: - return -1; - } - -CACHE_CONTRO: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return 16; - } - goto CACHE_CONTROL; - case 'l': - if (last) { - return 16; - } - goto CACHE_CONTROL; - default: - return -1; - } - -CO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CON; - case 'n': - if (last) { - return -1; - } - goto CON; - case 'O': - if (last) { - return -1; - } - goto COO; - case 'o': - if (last) { - return -1; - } - goto COO; - default: - return -1; - } - -CON: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONN; - case 'n': - if (last) { - return -1; - } - goto CONN; - case 'T': - if (last) { - return -1; - } - goto CONT; - case 't': - if (last) { - return -1; - } - goto CONT; - default: - return -1; - } - -CONN: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto CONNE; - case 'e': - if (last) { - return -1; - } - goto CONNE; - default: - return -1; - } - -CONNE: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto CONNEC; - case 'c': - if (last) { - return -1; - } - goto CONNEC; - default: - return -1; - } - -CONNEC: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto CONNECT; - case 't': - if (last) { - return -1; - } - goto CONNECT; - default: - return -1; - } - -CONNECT: - NEXT_CHAR(); - switch (ch) { - case 'I': 
- if (last) { - return -1; - } - goto CONNECTI; - case 'i': - if (last) { - return -1; - } - goto CONNECTI; - default: - return -1; - } - -CONNECTI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CONNECTIO; - case 'o': - if (last) { - return -1; - } - goto CONNECTIO; - default: - return -1; - } - -CONNECTIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 17; - } - goto CONNECTION; - case 'n': - if (last) { - return 17; - } - goto CONNECTION; - default: - return -1; - } - -CONT: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto CONTE; - case 'e': - if (last) { - return -1; - } - goto CONTE; - default: - return -1; - } - -CONTE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTEN; - case 'n': - if (last) { - return -1; - } - goto CONTEN; - default: - return -1; - } - -CONTEN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto CONTENT; - case 't': - if (last) { - return -1; - } - goto CONTENT; - default: - return -1; - } - -CONTENT: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto CONTENT_; - default: - return -1; - } - -CONTENT_: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto CONTENT_D; - case 'd': - if (last) { - return -1; - } - goto CONTENT_D; - case 'E': - if (last) { - return -1; - } - goto CONTENT_E; - case 'e': - if (last) { - return -1; - } - goto CONTENT_E; - case 'L': - if (last) { - return -1; - } - goto CONTENT_L; - case 'l': - if (last) { - return -1; - } - goto CONTENT_L; - case 'M': - if (last) { - return -1; - } - goto CONTENT_M; - case 'm': - if (last) { - return -1; - } - goto CONTENT_M; - case 'R': - if (last) { - return -1; - } - goto CONTENT_R; - case 'r': - if (last) { - return -1; - } - goto CONTENT_R; - case 'T': - if (last) { - return -1; - } - goto CONTENT_T; - case 't': - if (last) { - return -1; - } - goto CONTENT_T; - default: - return -1; - } - -CONTENT_D: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto CONTENT_DI; - case 'i': - if (last) { - return -1; - } - goto CONTENT_DI; - default: - return -1; - } - -CONTENT_DI: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto CONTENT_DIS; - case 's': - if (last) { - return -1; - } - goto CONTENT_DIS; - default: - return -1; - } - -CONTENT_DIS: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto CONTENT_DISP; - case 'p': - if (last) { - return -1; - } - goto CONTENT_DISP; - default: - return -1; - } - -CONTENT_DISP: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CONTENT_DISPO; - case 'o': - if (last) { - return -1; - } - goto CONTENT_DISPO; - default: - return -1; - } - -CONTENT_DISPO: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto CONTENT_DISPOS; - case 's': - if (last) { - return -1; - } - goto CONTENT_DISPOS; - default: - return -1; - } - -CONTENT_DISPOS: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto CONTENT_DISPOSI; - case 'i': - if (last) { - return -1; - } - goto CONTENT_DISPOSI; - default: - return -1; - } - -CONTENT_DISPOSI: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto CONTENT_DISPOSIT; - case 't': - if (last) { - return -1; - } - goto CONTENT_DISPOSIT; - default: - return -1; - } - -CONTENT_DISPOSIT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } 
- goto CONTENT_DISPOSITI; - case 'i': - if (last) { - return -1; - } - goto CONTENT_DISPOSITI; - default: - return -1; - } - -CONTENT_DISPOSITI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CONTENT_DISPOSITIO; - case 'o': - if (last) { - return -1; - } - goto CONTENT_DISPOSITIO; - default: - return -1; - } - -CONTENT_DISPOSITIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 18; - } - goto CONTENT_DISPOSITION; - case 'n': - if (last) { - return 18; - } - goto CONTENT_DISPOSITION; - default: - return -1; - } - -CONTENT_E: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_EN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_EN; - default: - return -1; - } - -CONTENT_EN: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto CONTENT_ENC; - case 'c': - if (last) { - return -1; - } - goto CONTENT_ENC; - default: - return -1; - } - -CONTENT_ENC: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CONTENT_ENCO; - case 'o': - if (last) { - return -1; - } - goto CONTENT_ENCO; - default: - return -1; - } - -CONTENT_ENCO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto CONTENT_ENCOD; - case 'd': - if (last) { - return -1; - } - goto CONTENT_ENCOD; - default: - return -1; - } - -CONTENT_ENCOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto CONTENT_ENCODI; - case 'i': - if (last) { - return -1; - } - goto CONTENT_ENCODI; - default: - return -1; - } - -CONTENT_ENCODI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_ENCODIN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_ENCODIN; - default: - return -1; - } - -CONTENT_ENCODIN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return 19; - } - goto CONTENT_ENCODING; - case 'g': - if (last) { - return 19; - } - goto CONTENT_ENCODING; - default: - return -1; - } - -CONTENT_L: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto CONTENT_LA; - case 'a': - if (last) { - return -1; - } - goto CONTENT_LA; - case 'E': - if (last) { - return -1; - } - goto CONTENT_LE; - case 'e': - if (last) { - return -1; - } - goto CONTENT_LE; - case 'O': - if (last) { - return -1; - } - goto CONTENT_LO; - case 'o': - if (last) { - return -1; - } - goto CONTENT_LO; - default: - return -1; - } - -CONTENT_LA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_LAN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_LAN; - default: - return -1; - } - -CONTENT_LAN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto CONTENT_LANG; - case 'g': - if (last) { - return -1; - } - goto CONTENT_LANG; - default: - return -1; - } - -CONTENT_LANG: - NEXT_CHAR(); - switch (ch) { - case 'U': - if (last) { - return -1; - } - goto CONTENT_LANGU; - case 'u': - if (last) { - return -1; - } - goto CONTENT_LANGU; - default: - return -1; - } - -CONTENT_LANGU: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto CONTENT_LANGUA; - case 'a': - if (last) { - return -1; - } - goto CONTENT_LANGUA; - default: - return -1; - } - -CONTENT_LANGUA: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto CONTENT_LANGUAG; - case 'g': - if (last) { - return -1; - } - goto CONTENT_LANGUAG; - default: - return -1; - } - -CONTENT_LANGUAG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if 
(last) { - return 20; - } - goto CONTENT_LANGUAGE; - case 'e': - if (last) { - return 20; - } - goto CONTENT_LANGUAGE; - default: - return -1; - } - -CONTENT_LE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_LEN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_LEN; - default: - return -1; - } - -CONTENT_LEN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto CONTENT_LENG; - case 'g': - if (last) { - return -1; - } - goto CONTENT_LENG; - default: - return -1; - } - -CONTENT_LENG: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto CONTENT_LENGT; - case 't': - if (last) { - return -1; - } - goto CONTENT_LENGT; - default: - return -1; - } - -CONTENT_LENGT: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return 21; - } - goto CONTENT_LENGTH; - case 'h': - if (last) { - return 21; - } - goto CONTENT_LENGTH; - default: - return -1; - } - -CONTENT_LO: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto CONTENT_LOC; - case 'c': - if (last) { - return -1; - } - goto CONTENT_LOC; - default: - return -1; - } - -CONTENT_LOC: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto CONTENT_LOCA; - case 'a': - if (last) { - return -1; - } - goto CONTENT_LOCA; - default: - return -1; - } - -CONTENT_LOCA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto CONTENT_LOCAT; - case 't': - if (last) { - return -1; - } - goto CONTENT_LOCAT; - default: - return -1; - } - -CONTENT_LOCAT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto CONTENT_LOCATI; - case 'i': - if (last) { - return -1; - } - goto CONTENT_LOCATI; - default: - return -1; - } - -CONTENT_LOCATI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CONTENT_LOCATIO; - case 'o': - if (last) { - return -1; - } - goto CONTENT_LOCATIO; - default: - return -1; - } - -CONTENT_LOCATIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 22; - } - goto CONTENT_LOCATION; - case 'n': - if (last) { - return 22; - } - goto CONTENT_LOCATION; - default: - return -1; - } - -CONTENT_M: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto CONTENT_MD; - case 'd': - if (last) { - return -1; - } - goto CONTENT_MD; - default: - return -1; - } - -CONTENT_MD: - NEXT_CHAR(); - switch (ch) { - case '5': - if (last) { - return 23; - } - goto CONTENT_MD5; - default: - return -1; - } - -CONTENT_R: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto CONTENT_RA; - case 'a': - if (last) { - return -1; - } - goto CONTENT_RA; - default: - return -1; - } - -CONTENT_RA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_RAN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_RAN; - default: - return -1; - } - -CONTENT_RAN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto CONTENT_RANG; - case 'g': - if (last) { - return -1; - } - goto CONTENT_RANG; - default: - return -1; - } - -CONTENT_RANG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 24; - } - goto CONTENT_RANGE; - case 'e': - if (last) { - return 24; - } - goto CONTENT_RANGE; - default: - return -1; - } - -CONTENT_T: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto CONTENT_TR; - case 'r': - if (last) { - return -1; - } - goto CONTENT_TR; - case 'Y': - if (last) { - return 
-1; - } - goto CONTENT_TY; - case 'y': - if (last) { - return -1; - } - goto CONTENT_TY; - default: - return -1; - } - -CONTENT_TR: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto CONTENT_TRA; - case 'a': - if (last) { - return -1; - } - goto CONTENT_TRA; - default: - return -1; - } - -CONTENT_TRA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_TRAN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_TRAN; - default: - return -1; - } - -CONTENT_TRAN: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto CONTENT_TRANS; - case 's': - if (last) { - return -1; - } - goto CONTENT_TRANS; - default: - return -1; - } - -CONTENT_TRANS: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto CONTENT_TRANSF; - case 'f': - if (last) { - return -1; - } - goto CONTENT_TRANSF; - default: - return -1; - } - -CONTENT_TRANSF: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto CONTENT_TRANSFE; - case 'e': - if (last) { - return -1; - } - goto CONTENT_TRANSFE; - default: - return -1; - } - -CONTENT_TRANSFE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto CONTENT_TRANSFER; - case 'r': - if (last) { - return -1; - } - goto CONTENT_TRANSFER; - default: - return -1; - } - -CONTENT_TRANSFER: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_; - default: - return -1; - } - -CONTENT_TRANSFER_: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_E; - case 'e': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_E; - default: - return -1; - } - -CONTENT_TRANSFER_E: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_EN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_EN; - default: - return -1; - } - -CONTENT_TRANSFER_EN: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENC; - case 'c': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENC; - default: - return -1; - } - -CONTENT_TRANSFER_ENC: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCO; - case 'o': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCO; - default: - return -1; - } - -CONTENT_TRANSFER_ENCO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCOD; - case 'd': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCOD; - default: - return -1; - } - -CONTENT_TRANSFER_ENCOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCODI; - case 'i': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCODI; - default: - return -1; - } - -CONTENT_TRANSFER_ENCODI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCODIN; - case 'n': - if (last) { - return -1; - } - goto CONTENT_TRANSFER_ENCODIN; - default: - return -1; - } - -CONTENT_TRANSFER_ENCODIN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return 25; - } - goto CONTENT_TRANSFER_ENCODING; - case 'g': - if (last) { - return 25; - } - goto CONTENT_TRANSFER_ENCODING; - default: - return -1; - } - -CONTENT_TY: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto CONTENT_TYP; - case 'p': - if (last) { - return -1; - } - goto CONTENT_TYP; - default: - return 
-1; - } - -CONTENT_TYP: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 26; - } - goto CONTENT_TYPE; - case 'e': - if (last) { - return 26; - } - goto CONTENT_TYPE; - default: - return -1; - } - -COO: - NEXT_CHAR(); - switch (ch) { - case 'K': - if (last) { - return -1; - } - goto COOK; - case 'k': - if (last) { - return -1; - } - goto COOK; - default: - return -1; - } - -COOK: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto COOKI; - case 'i': - if (last) { - return -1; - } - goto COOKI; - default: - return -1; - } - -COOKI: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 27; - } - goto COOKIE; - case 'e': - if (last) { - return 27; - } - goto COOKIE; - default: - return -1; - } - -D: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto DA; - case 'a': - if (last) { - return -1; - } - goto DA; - case 'E': - if (last) { - return -1; - } - goto DE; - case 'e': - if (last) { - return -1; - } - goto DE; - case 'I': - if (last) { - return -1; - } - goto DI; - case 'i': - if (last) { - return -1; - } - goto DI; - default: - return -1; - } - -DA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto DAT; - case 't': - if (last) { - return -1; - } - goto DAT; - default: - return -1; - } - -DAT: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 28; - } - goto DATE; - case 'e': - if (last) { - return 28; - } - goto DATE; - default: - return -1; - } - -DE: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto DES; - case 's': - if (last) { - return -1; - } - goto DES; - default: - return -1; - } - -DES: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto DEST; - case 't': - if (last) { - return -1; - } - goto DEST; - default: - return -1; - } - -DEST: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto DESTI; - case 'i': - if (last) { - return -1; - } - goto DESTI; - default: - return -1; - } - -DESTI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto DESTIN; - case 'n': - if (last) { - return -1; - } - goto DESTIN; - default: - return -1; - } - -DESTIN: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto DESTINA; - case 'a': - if (last) { - return -1; - } - goto DESTINA; - default: - return -1; - } - -DESTINA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto DESTINAT; - case 't': - if (last) { - return -1; - } - goto DESTINAT; - default: - return -1; - } - -DESTINAT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto DESTINATI; - case 'i': - if (last) { - return -1; - } - goto DESTINATI; - default: - return -1; - } - -DESTINATI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto DESTINATIO; - case 'o': - if (last) { - return -1; - } - goto DESTINATIO; - default: - return -1; - } - -DESTINATIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 29; - } - goto DESTINATION; - case 'n': - if (last) { - return 29; - } - goto DESTINATION; - default: - return -1; - } - -DI: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto DIG; - case 'g': - if (last) { - return -1; - } - goto DIG; - default: - return -1; - } - -DIG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto DIGE; - case 'e': - if (last) { - return -1; - } - goto DIGE; - default: - return -1; - } - 
-DIGE: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto DIGES; - case 's': - if (last) { - return -1; - } - goto DIGES; - default: - return -1; - } - -DIGES: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 30; - } - goto DIGEST; - case 't': - if (last) { - return 30; - } - goto DIGEST; - default: - return -1; - } - -E: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto ET; - case 't': - if (last) { - return -1; - } - goto ET; - case 'X': - if (last) { - return -1; - } - goto EX; - case 'x': - if (last) { - return -1; - } - goto EX; - default: - return -1; - } - -ET: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto ETA; - case 'a': - if (last) { - return -1; - } - goto ETA; - default: - return -1; - } - -ETA: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return 31; - } - goto ETAG; - case 'g': - if (last) { - return 31; - } - goto ETAG; - default: - return -1; - } - -EX: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto EXP; - case 'p': - if (last) { - return -1; - } - goto EXP; - default: - return -1; - } - -EXP: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto EXPE; - case 'e': - if (last) { - return -1; - } - goto EXPE; - case 'I': - if (last) { - return -1; - } - goto EXPI; - case 'i': - if (last) { - return -1; - } - goto EXPI; - default: - return -1; - } - -EXPE: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto EXPEC; - case 'c': - if (last) { - return -1; - } - goto EXPEC; - default: - return -1; - } - -EXPEC: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 32; - } - goto EXPECT; - case 't': - if (last) { - return 32; - } - goto EXPECT; - default: - return -1; - } - -EXPI: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto EXPIR; - case 'r': - if (last) { - return -1; - } - goto EXPIR; - default: - return -1; - } - -EXPIR: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto EXPIRE; - case 'e': - if (last) { - return -1; - } - goto EXPIRE; - default: - return -1; - } - -EXPIRE: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 33; - } - goto EXPIRES; - case 's': - if (last) { - return 33; - } - goto EXPIRES; - default: - return -1; - } - -F: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto FO; - case 'o': - if (last) { - return -1; - } - goto FO; - case 'R': - if (last) { - return -1; - } - goto FR; - case 'r': - if (last) { - return -1; - } - goto FR; - default: - return -1; - } - -FO: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto FOR; - case 'r': - if (last) { - return -1; - } - goto FOR; - default: - return -1; - } - -FOR: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto FORW; - case 'w': - if (last) { - return -1; - } - goto FORW; - default: - return -1; - } - -FORW: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto FORWA; - case 'a': - if (last) { - return -1; - } - goto FORWA; - default: - return -1; - } - -FORWA: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto FORWAR; - case 'r': - if (last) { - return -1; - } - goto FORWAR; - default: - return -1; - } - -FORWAR: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto FORWARD; - case 'd': - if (last) { - return -1; - } - goto FORWARD; - 
default: - return -1; - } - -FORWARD: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto FORWARDE; - case 'e': - if (last) { - return -1; - } - goto FORWARDE; - default: - return -1; - } - -FORWARDE: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return 34; - } - goto FORWARDED; - case 'd': - if (last) { - return 34; - } - goto FORWARDED; - default: - return -1; - } - -FR: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto FRO; - case 'o': - if (last) { - return -1; - } - goto FRO; - default: - return -1; - } - -FRO: - NEXT_CHAR(); - switch (ch) { - case 'M': - if (last) { - return 35; - } - goto FROM; - case 'm': - if (last) { - return 35; - } - goto FROM; - default: - return -1; - } - -H: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto HO; - case 'o': - if (last) { - return -1; - } - goto HO; - default: - return -1; - } - -HO: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto HOS; - case 's': - if (last) { - return -1; - } - goto HOS; - default: - return -1; - } - -HOS: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 36; - } - goto HOST; - case 't': - if (last) { - return 36; - } - goto HOST; - default: - return -1; - } - -I: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto IF; - case 'f': - if (last) { - return -1; - } - goto IF; - default: - return -1; - } - -IF: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto IF_; - default: - return -1; - } - -IF_: - NEXT_CHAR(); - switch (ch) { - case 'M': - if (last) { - return -1; - } - goto IF_M; - case 'm': - if (last) { - return -1; - } - goto IF_M; - case 'N': - if (last) { - return -1; - } - goto IF_N; - case 'n': - if (last) { - return -1; - } - goto IF_N; - case 'R': - if (last) { - return -1; - } - goto IF_R; - case 'r': - if (last) { - return -1; - } - goto IF_R; - case 'U': - if (last) { - return -1; - } - goto IF_U; - case 'u': - if (last) { - return -1; - } - goto IF_U; - default: - return -1; - } - -IF_M: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto IF_MA; - case 'a': - if (last) { - return -1; - } - goto IF_MA; - case 'O': - if (last) { - return -1; - } - goto IF_MO; - case 'o': - if (last) { - return -1; - } - goto IF_MO; - default: - return -1; - } - -IF_MA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto IF_MAT; - case 't': - if (last) { - return -1; - } - goto IF_MAT; - default: - return -1; - } - -IF_MAT: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto IF_MATC; - case 'c': - if (last) { - return -1; - } - goto IF_MATC; - default: - return -1; - } - -IF_MATC: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return 37; - } - goto IF_MATCH; - case 'h': - if (last) { - return 37; - } - goto IF_MATCH; - default: - return -1; - } - -IF_MO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto IF_MOD; - case 'd': - if (last) { - return -1; - } - goto IF_MOD; - default: - return -1; - } - -IF_MOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto IF_MODI; - case 'i': - if (last) { - return -1; - } - goto IF_MODI; - default: - return -1; - } - -IF_MODI: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto IF_MODIF; - case 'f': - if (last) { - return -1; - } - goto IF_MODIF; - default: - return -1; - } - -IF_MODIF: - NEXT_CHAR(); 
- switch (ch) { - case 'I': - if (last) { - return -1; - } - goto IF_MODIFI; - case 'i': - if (last) { - return -1; - } - goto IF_MODIFI; - default: - return -1; - } - -IF_MODIFI: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto IF_MODIFIE; - case 'e': - if (last) { - return -1; - } - goto IF_MODIFIE; - default: - return -1; - } - -IF_MODIFIE: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto IF_MODIFIED; - case 'd': - if (last) { - return -1; - } - goto IF_MODIFIED; - default: - return -1; - } - -IF_MODIFIED: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto IF_MODIFIED_; - default: - return -1; - } - -IF_MODIFIED_: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto IF_MODIFIED_S; - case 's': - if (last) { - return -1; - } - goto IF_MODIFIED_S; - default: - return -1; - } - -IF_MODIFIED_S: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto IF_MODIFIED_SI; - case 'i': - if (last) { - return -1; - } - goto IF_MODIFIED_SI; - default: - return -1; - } - -IF_MODIFIED_SI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto IF_MODIFIED_SIN; - case 'n': - if (last) { - return -1; - } - goto IF_MODIFIED_SIN; - default: - return -1; - } - -IF_MODIFIED_SIN: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto IF_MODIFIED_SINC; - case 'c': - if (last) { - return -1; - } - goto IF_MODIFIED_SINC; - default: - return -1; - } - -IF_MODIFIED_SINC: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 38; - } - goto IF_MODIFIED_SINCE; - case 'e': - if (last) { - return 38; - } - goto IF_MODIFIED_SINCE; - default: - return -1; - } - -IF_N: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto IF_NO; - case 'o': - if (last) { - return -1; - } - goto IF_NO; - default: - return -1; - } - -IF_NO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto IF_NON; - case 'n': - if (last) { - return -1; - } - goto IF_NON; - default: - return -1; - } - -IF_NON: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto IF_NONE; - case 'e': - if (last) { - return -1; - } - goto IF_NONE; - default: - return -1; - } - -IF_NONE: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto IF_NONE_; - default: - return -1; - } - -IF_NONE_: - NEXT_CHAR(); - switch (ch) { - case 'M': - if (last) { - return -1; - } - goto IF_NONE_M; - case 'm': - if (last) { - return -1; - } - goto IF_NONE_M; - default: - return -1; - } - -IF_NONE_M: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto IF_NONE_MA; - case 'a': - if (last) { - return -1; - } - goto IF_NONE_MA; - default: - return -1; - } - -IF_NONE_MA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto IF_NONE_MAT; - case 't': - if (last) { - return -1; - } - goto IF_NONE_MAT; - default: - return -1; - } - -IF_NONE_MAT: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto IF_NONE_MATC; - case 'c': - if (last) { - return -1; - } - goto IF_NONE_MATC; - default: - return -1; - } - -IF_NONE_MATC: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return 39; - } - goto IF_NONE_MATCH; - case 'h': - if (last) { - return 39; - } - goto IF_NONE_MATCH; - default: - return -1; - } - -IF_R: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto IF_RA; - case 
'a': - if (last) { - return -1; - } - goto IF_RA; - default: - return -1; - } - -IF_RA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto IF_RAN; - case 'n': - if (last) { - return -1; - } - goto IF_RAN; - default: - return -1; - } - -IF_RAN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto IF_RANG; - case 'g': - if (last) { - return -1; - } - goto IF_RANG; - default: - return -1; - } - -IF_RANG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 40; - } - goto IF_RANGE; - case 'e': - if (last) { - return 40; - } - goto IF_RANGE; - default: - return -1; - } - -IF_U: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto IF_UN; - case 'n': - if (last) { - return -1; - } - goto IF_UN; - default: - return -1; - } - -IF_UN: - NEXT_CHAR(); - switch (ch) { - case 'M': - if (last) { - return -1; - } - goto IF_UNM; - case 'm': - if (last) { - return -1; - } - goto IF_UNM; - default: - return -1; - } - -IF_UNM: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto IF_UNMO; - case 'o': - if (last) { - return -1; - } - goto IF_UNMO; - default: - return -1; - } - -IF_UNMO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto IF_UNMOD; - case 'd': - if (last) { - return -1; - } - goto IF_UNMOD; - default: - return -1; - } - -IF_UNMOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto IF_UNMODI; - case 'i': - if (last) { - return -1; - } - goto IF_UNMODI; - default: - return -1; - } - -IF_UNMODI: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto IF_UNMODIF; - case 'f': - if (last) { - return -1; - } - goto IF_UNMODIF; - default: - return -1; - } - -IF_UNMODIF: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto IF_UNMODIFI; - case 'i': - if (last) { - return -1; - } - goto IF_UNMODIFI; - default: - return -1; - } - -IF_UNMODIFI: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto IF_UNMODIFIE; - case 'e': - if (last) { - return -1; - } - goto IF_UNMODIFIE; - default: - return -1; - } - -IF_UNMODIFIE: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto IF_UNMODIFIED; - case 'd': - if (last) { - return -1; - } - goto IF_UNMODIFIED; - default: - return -1; - } - -IF_UNMODIFIED: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto IF_UNMODIFIED_; - default: - return -1; - } - -IF_UNMODIFIED_: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto IF_UNMODIFIED_S; - case 's': - if (last) { - return -1; - } - goto IF_UNMODIFIED_S; - default: - return -1; - } - -IF_UNMODIFIED_S: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto IF_UNMODIFIED_SI; - case 'i': - if (last) { - return -1; - } - goto IF_UNMODIFIED_SI; - default: - return -1; - } - -IF_UNMODIFIED_SI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto IF_UNMODIFIED_SIN; - case 'n': - if (last) { - return -1; - } - goto IF_UNMODIFIED_SIN; - default: - return -1; - } - -IF_UNMODIFIED_SIN: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto IF_UNMODIFIED_SINC; - case 'c': - if (last) { - return -1; - } - goto IF_UNMODIFIED_SINC; - default: - return -1; - } - -IF_UNMODIFIED_SINC: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 41; - } - goto IF_UNMODIFIED_SINCE; - case 'e': - if (last) { - 
return 41; - } - goto IF_UNMODIFIED_SINCE; - default: - return -1; - } - -K: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto KE; - case 'e': - if (last) { - return -1; - } - goto KE; - default: - return -1; - } - -KE: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto KEE; - case 'e': - if (last) { - return -1; - } - goto KEE; - default: - return -1; - } - -KEE: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto KEEP; - case 'p': - if (last) { - return -1; - } - goto KEEP; - default: - return -1; - } - -KEEP: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto KEEP_; - default: - return -1; - } - -KEEP_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto KEEP_A; - case 'a': - if (last) { - return -1; - } - goto KEEP_A; - default: - return -1; - } - -KEEP_A: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto KEEP_AL; - case 'l': - if (last) { - return -1; - } - goto KEEP_AL; - default: - return -1; - } - -KEEP_AL: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto KEEP_ALI; - case 'i': - if (last) { - return -1; - } - goto KEEP_ALI; - default: - return -1; - } - -KEEP_ALI: - NEXT_CHAR(); - switch (ch) { - case 'V': - if (last) { - return -1; - } - goto KEEP_ALIV; - case 'v': - if (last) { - return -1; - } - goto KEEP_ALIV; - default: - return -1; - } - -KEEP_ALIV: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 42; - } - goto KEEP_ALIVE; - case 'e': - if (last) { - return 42; - } - goto KEEP_ALIVE; - default: - return -1; - } - -L: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto LA; - case 'a': - if (last) { - return -1; - } - goto LA; - case 'I': - if (last) { - return -1; - } - goto LI; - case 'i': - if (last) { - return -1; - } - goto LI; - case 'O': - if (last) { - return -1; - } - goto LO; - case 'o': - if (last) { - return -1; - } - goto LO; - default: - return -1; - } - -LA: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto LAS; - case 's': - if (last) { - return -1; - } - goto LAS; - default: - return -1; - } - -LAS: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto LAST; - case 't': - if (last) { - return -1; - } - goto LAST; - default: - return -1; - } - -LAST: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto LAST_; - default: - return -1; - } - -LAST_: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto LAST_E; - case 'e': - if (last) { - return -1; - } - goto LAST_E; - case 'M': - if (last) { - return -1; - } - goto LAST_M; - case 'm': - if (last) { - return -1; - } - goto LAST_M; - default: - return -1; - } - -LAST_E: - NEXT_CHAR(); - switch (ch) { - case 'V': - if (last) { - return -1; - } - goto LAST_EV; - case 'v': - if (last) { - return -1; - } - goto LAST_EV; - default: - return -1; - } - -LAST_EV: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto LAST_EVE; - case 'e': - if (last) { - return -1; - } - goto LAST_EVE; - default: - return -1; - } - -LAST_EVE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto LAST_EVEN; - case 'n': - if (last) { - return -1; - } - goto LAST_EVEN; - default: - return -1; - } - -LAST_EVEN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto LAST_EVENT; - case 't': - if (last) { 
- return -1; - } - goto LAST_EVENT; - default: - return -1; - } - -LAST_EVENT: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto LAST_EVENT_; - default: - return -1; - } - -LAST_EVENT_: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto LAST_EVENT_I; - case 'i': - if (last) { - return -1; - } - goto LAST_EVENT_I; - default: - return -1; - } - -LAST_EVENT_I: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return 43; - } - goto LAST_EVENT_ID; - case 'd': - if (last) { - return 43; - } - goto LAST_EVENT_ID; - default: - return -1; - } - -LAST_M: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto LAST_MO; - case 'o': - if (last) { - return -1; - } - goto LAST_MO; - default: - return -1; - } - -LAST_MO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto LAST_MOD; - case 'd': - if (last) { - return -1; - } - goto LAST_MOD; - default: - return -1; - } - -LAST_MOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto LAST_MODI; - case 'i': - if (last) { - return -1; - } - goto LAST_MODI; - default: - return -1; - } - -LAST_MODI: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto LAST_MODIF; - case 'f': - if (last) { - return -1; - } - goto LAST_MODIF; - default: - return -1; - } - -LAST_MODIF: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto LAST_MODIFI; - case 'i': - if (last) { - return -1; - } - goto LAST_MODIFI; - default: - return -1; - } - -LAST_MODIFI: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto LAST_MODIFIE; - case 'e': - if (last) { - return -1; - } - goto LAST_MODIFIE; - default: - return -1; - } - -LAST_MODIFIE: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return 44; - } - goto LAST_MODIFIED; - case 'd': - if (last) { - return 44; - } - goto LAST_MODIFIED; - default: - return -1; - } - -LI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto LIN; - case 'n': - if (last) { - return -1; - } - goto LIN; - default: - return -1; - } - -LIN: - NEXT_CHAR(); - switch (ch) { - case 'K': - if (last) { - return 45; - } - goto LINK; - case 'k': - if (last) { - return 45; - } - goto LINK; - default: - return -1; - } - -LO: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto LOC; - case 'c': - if (last) { - return -1; - } - goto LOC; - default: - return -1; - } - -LOC: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto LOCA; - case 'a': - if (last) { - return -1; - } - goto LOCA; - default: - return -1; - } - -LOCA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto LOCAT; - case 't': - if (last) { - return -1; - } - goto LOCAT; - default: - return -1; - } - -LOCAT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto LOCATI; - case 'i': - if (last) { - return -1; - } - goto LOCATI; - default: - return -1; - } - -LOCATI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto LOCATIO; - case 'o': - if (last) { - return -1; - } - goto LOCATIO; - default: - return -1; - } - -LOCATIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 46; - } - goto LOCATION; - case 'n': - if (last) { - return 46; - } - goto LOCATION; - default: - return -1; - } - -M: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto MA; - case 'a': - if 
(last) { - return -1; - } - goto MA; - default: - return -1; - } - -MA: - NEXT_CHAR(); - switch (ch) { - case 'X': - if (last) { - return -1; - } - goto MAX; - case 'x': - if (last) { - return -1; - } - goto MAX; - default: - return -1; - } - -MAX: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto MAX_; - default: - return -1; - } - -MAX_: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto MAX_F; - case 'f': - if (last) { - return -1; - } - goto MAX_F; - default: - return -1; - } - -MAX_F: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto MAX_FO; - case 'o': - if (last) { - return -1; - } - goto MAX_FO; - default: - return -1; - } - -MAX_FO: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto MAX_FOR; - case 'r': - if (last) { - return -1; - } - goto MAX_FOR; - default: - return -1; - } - -MAX_FOR: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto MAX_FORW; - case 'w': - if (last) { - return -1; - } - goto MAX_FORW; - default: - return -1; - } - -MAX_FORW: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto MAX_FORWA; - case 'a': - if (last) { - return -1; - } - goto MAX_FORWA; - default: - return -1; - } - -MAX_FORWA: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto MAX_FORWAR; - case 'r': - if (last) { - return -1; - } - goto MAX_FORWAR; - default: - return -1; - } - -MAX_FORWAR: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto MAX_FORWARD; - case 'd': - if (last) { - return -1; - } - goto MAX_FORWARD; - default: - return -1; - } - -MAX_FORWARD: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 47; - } - goto MAX_FORWARDS; - case 's': - if (last) { - return 47; - } - goto MAX_FORWARDS; - default: - return -1; - } - -O: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto OR; - case 'r': - if (last) { - return -1; - } - goto OR; - default: - return -1; - } - -OR: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto ORI; - case 'i': - if (last) { - return -1; - } - goto ORI; - default: - return -1; - } - -ORI: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto ORIG; - case 'g': - if (last) { - return -1; - } - goto ORIG; - default: - return -1; - } - -ORIG: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto ORIGI; - case 'i': - if (last) { - return -1; - } - goto ORIGI; - default: - return -1; - } - -ORIGI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 48; - } - goto ORIGIN; - case 'n': - if (last) { - return 48; - } - goto ORIGIN; - default: - return -1; - } - -P: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto PR; - case 'r': - if (last) { - return -1; - } - goto PR; - default: - return -1; - } - -PR: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto PRA; - case 'a': - if (last) { - return -1; - } - goto PRA; - case 'O': - if (last) { - return -1; - } - goto PRO; - case 'o': - if (last) { - return -1; - } - goto PRO; - default: - return -1; - } - -PRA: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto PRAG; - case 'g': - if (last) { - return -1; - } - goto PRAG; - default: - return -1; - } - -PRAG: - NEXT_CHAR(); - switch (ch) { - case 'M': - if (last) { - return -1; - } - goto PRAGM; - case 'm': - if 
(last) { - return -1; - } - goto PRAGM; - default: - return -1; - } - -PRAGM: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return 49; - } - goto PRAGMA; - case 'a': - if (last) { - return 49; - } - goto PRAGMA; - default: - return -1; - } - -PRO: - NEXT_CHAR(); - switch (ch) { - case 'X': - if (last) { - return -1; - } - goto PROX; - case 'x': - if (last) { - return -1; - } - goto PROX; - default: - return -1; - } - -PROX: - NEXT_CHAR(); - switch (ch) { - case 'Y': - if (last) { - return -1; - } - goto PROXY; - case 'y': - if (last) { - return -1; - } - goto PROXY; - default: - return -1; - } - -PROXY: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto PROXY_; - default: - return -1; - } - -PROXY_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto PROXY_A; - case 'a': - if (last) { - return -1; - } - goto PROXY_A; - default: - return -1; - } - -PROXY_A: - NEXT_CHAR(); - switch (ch) { - case 'U': - if (last) { - return -1; - } - goto PROXY_AU; - case 'u': - if (last) { - return -1; - } - goto PROXY_AU; - default: - return -1; - } - -PROXY_AU: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto PROXY_AUT; - case 't': - if (last) { - return -1; - } - goto PROXY_AUT; - default: - return -1; - } - -PROXY_AUT: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto PROXY_AUTH; - case 'h': - if (last) { - return -1; - } - goto PROXY_AUTH; - default: - return -1; - } - -PROXY_AUTH: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto PROXY_AUTHE; - case 'e': - if (last) { - return -1; - } - goto PROXY_AUTHE; - case 'O': - if (last) { - return -1; - } - goto PROXY_AUTHO; - case 'o': - if (last) { - return -1; - } - goto PROXY_AUTHO; - default: - return -1; - } - -PROXY_AUTHE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto PROXY_AUTHEN; - case 'n': - if (last) { - return -1; - } - goto PROXY_AUTHEN; - default: - return -1; - } - -PROXY_AUTHEN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto PROXY_AUTHENT; - case 't': - if (last) { - return -1; - } - goto PROXY_AUTHENT; - default: - return -1; - } - -PROXY_AUTHENT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto PROXY_AUTHENTI; - case 'i': - if (last) { - return -1; - } - goto PROXY_AUTHENTI; - default: - return -1; - } - -PROXY_AUTHENTI: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto PROXY_AUTHENTIC; - case 'c': - if (last) { - return -1; - } - goto PROXY_AUTHENTIC; - default: - return -1; - } - -PROXY_AUTHENTIC: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto PROXY_AUTHENTICA; - case 'a': - if (last) { - return -1; - } - goto PROXY_AUTHENTICA; - default: - return -1; - } - -PROXY_AUTHENTICA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto PROXY_AUTHENTICAT; - case 't': - if (last) { - return -1; - } - goto PROXY_AUTHENTICAT; - default: - return -1; - } - -PROXY_AUTHENTICAT: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 50; - } - goto PROXY_AUTHENTICATE; - case 'e': - if (last) { - return 50; - } - goto PROXY_AUTHENTICATE; - default: - return -1; - } - -PROXY_AUTHO: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto PROXY_AUTHOR; - case 'r': - if (last) { - return -1; - } - goto PROXY_AUTHOR; - default: - return -1; - } - -PROXY_AUTHOR: - 
NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto PROXY_AUTHORI; - case 'i': - if (last) { - return -1; - } - goto PROXY_AUTHORI; - default: - return -1; - } - -PROXY_AUTHORI: - NEXT_CHAR(); - switch (ch) { - case 'Z': - if (last) { - return -1; - } - goto PROXY_AUTHORIZ; - case 'z': - if (last) { - return -1; - } - goto PROXY_AUTHORIZ; - default: - return -1; - } - -PROXY_AUTHORIZ: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto PROXY_AUTHORIZA; - case 'a': - if (last) { - return -1; - } - goto PROXY_AUTHORIZA; - default: - return -1; - } - -PROXY_AUTHORIZA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto PROXY_AUTHORIZAT; - case 't': - if (last) { - return -1; - } - goto PROXY_AUTHORIZAT; - default: - return -1; - } - -PROXY_AUTHORIZAT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto PROXY_AUTHORIZATI; - case 'i': - if (last) { - return -1; - } - goto PROXY_AUTHORIZATI; - default: - return -1; - } - -PROXY_AUTHORIZATI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto PROXY_AUTHORIZATIO; - case 'o': - if (last) { - return -1; - } - goto PROXY_AUTHORIZATIO; - default: - return -1; - } - -PROXY_AUTHORIZATIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 51; - } - goto PROXY_AUTHORIZATION; - case 'n': - if (last) { - return 51; - } - goto PROXY_AUTHORIZATION; - default: - return -1; - } - -R: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto RA; - case 'a': - if (last) { - return -1; - } - goto RA; - case 'E': - if (last) { - return -1; - } - goto RE; - case 'e': - if (last) { - return -1; - } - goto RE; - default: - return -1; - } - -RA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto RAN; - case 'n': - if (last) { - return -1; - } - goto RAN; - default: - return -1; - } - -RAN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto RANG; - case 'g': - if (last) { - return -1; - } - goto RANG; - default: - return -1; - } - -RANG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 52; - } - goto RANGE; - case 'e': - if (last) { - return 52; - } - goto RANGE; - default: - return -1; - } - -RE: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto REF; - case 'f': - if (last) { - return -1; - } - goto REF; - case 'T': - if (last) { - return -1; - } - goto RET; - case 't': - if (last) { - return -1; - } - goto RET; - default: - return -1; - } - -REF: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto REFE; - case 'e': - if (last) { - return -1; - } - goto REFE; - default: - return -1; - } - -REFE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto REFER; - case 'r': - if (last) { - return -1; - } - goto REFER; - default: - return -1; - } - -REFER: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto REFERE; - case 'e': - if (last) { - return -1; - } - goto REFERE; - default: - return -1; - } - -REFERE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return 53; - } - goto REFERER; - case 'r': - if (last) { - return 53; - } - goto REFERER; - default: - return -1; - } - -RET: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto RETR; - case 'r': - if (last) { - return -1; - } - goto RETR; - default: - return -1; - } - -RETR: - NEXT_CHAR(); - switch (ch) { - case 'Y': 
- if (last) { - return -1; - } - goto RETRY; - case 'y': - if (last) { - return -1; - } - goto RETRY; - default: - return -1; - } - -RETRY: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto RETRY_; - default: - return -1; - } - -RETRY_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto RETRY_A; - case 'a': - if (last) { - return -1; - } - goto RETRY_A; - default: - return -1; - } - -RETRY_A: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto RETRY_AF; - case 'f': - if (last) { - return -1; - } - goto RETRY_AF; - default: - return -1; - } - -RETRY_AF: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto RETRY_AFT; - case 't': - if (last) { - return -1; - } - goto RETRY_AFT; - default: - return -1; - } - -RETRY_AFT: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto RETRY_AFTE; - case 'e': - if (last) { - return -1; - } - goto RETRY_AFTE; - default: - return -1; - } - -RETRY_AFTE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return 54; - } - goto RETRY_AFTER; - case 'r': - if (last) { - return 54; - } - goto RETRY_AFTER; - default: - return -1; - } - -S: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SE; - case 'e': - if (last) { - return -1; - } - goto SE; - default: - return -1; - } - -SE: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto SEC; - case 'c': - if (last) { - return -1; - } - goto SEC; - case 'R': - if (last) { - return -1; - } - goto SER; - case 'r': - if (last) { - return -1; - } - goto SER; - case 'T': - if (last) { - return -1; - } - goto SET; - case 't': - if (last) { - return -1; - } - goto SET; - default: - return -1; - } - -SEC: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto SEC_; - default: - return -1; - } - -SEC_: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto SEC_W; - case 'w': - if (last) { - return -1; - } - goto SEC_W; - default: - return -1; - } - -SEC_W: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SEC_WE; - case 'e': - if (last) { - return -1; - } - goto SEC_WE; - default: - return -1; - } - -SEC_WE: - NEXT_CHAR(); - switch (ch) { - case 'B': - if (last) { - return -1; - } - goto SEC_WEB; - case 'b': - if (last) { - return -1; - } - goto SEC_WEB; - default: - return -1; - } - -SEC_WEB: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto SEC_WEBS; - case 's': - if (last) { - return -1; - } - goto SEC_WEBS; - default: - return -1; - } - -SEC_WEBS: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SEC_WEBSO; - case 'o': - if (last) { - return -1; - } - goto SEC_WEBSO; - default: - return -1; - } - -SEC_WEBSO: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto SEC_WEBSOC; - case 'c': - if (last) { - return -1; - } - goto SEC_WEBSOC; - default: - return -1; - } - -SEC_WEBSOC: - NEXT_CHAR(); - switch (ch) { - case 'K': - if (last) { - return -1; - } - goto SEC_WEBSOCK; - case 'k': - if (last) { - return -1; - } - goto SEC_WEBSOCK; - default: - return -1; - } - -SEC_WEBSOCK: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SEC_WEBSOCKE; - case 'e': - if (last) { - return -1; - } - goto SEC_WEBSOCKE; - default: - return -1; - } - -SEC_WEBSOCKE: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - 
goto SEC_WEBSOCKET; - case 't': - if (last) { - return -1; - } - goto SEC_WEBSOCKET; - default: - return -1; - } - -SEC_WEBSOCKET: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_; - default: - return -1; - } - -SEC_WEBSOCKET_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_A; - case 'a': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_A; - case 'E': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_E; - case 'e': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_E; - case 'K': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_K; - case 'k': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_K; - case 'P': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_P; - case 'p': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_P; - case 'V': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_V; - case 'v': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_V; - default: - return -1; - } - -SEC_WEBSOCKET_A: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_AC; - case 'c': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_AC; - default: - return -1; - } - -SEC_WEBSOCKET_AC: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_ACC; - case 'c': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_ACC; - default: - return -1; - } - -SEC_WEBSOCKET_ACC: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_ACCE; - case 'e': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_ACCE; - default: - return -1; - } - -SEC_WEBSOCKET_ACCE: - NEXT_CHAR(); - switch (ch) { - case 'P': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_ACCEP; - case 'p': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_ACCEP; - default: - return -1; - } - -SEC_WEBSOCKET_ACCEP: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 55; - } - goto SEC_WEBSOCKET_ACCEPT; - case 't': - if (last) { - return 55; - } - goto SEC_WEBSOCKET_ACCEPT; - default: - return -1; - } - -SEC_WEBSOCKET_E: - NEXT_CHAR(); - switch (ch) { - case 'X': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EX; - case 'x': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EX; - default: - return -1; - } - -SEC_WEBSOCKET_EX: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXT; - case 't': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXT; - default: - return -1; - } - -SEC_WEBSOCKET_EXT: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTE; - case 'e': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTE; - default: - return -1; - } - -SEC_WEBSOCKET_EXTE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTEN; - case 'n': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTEN; - default: - return -1; - } - -SEC_WEBSOCKET_EXTEN: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENS; - case 's': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENS; - default: - return -1; - } - -SEC_WEBSOCKET_EXTENS: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENSI; - case 'i': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENSI; - default: - return -1; - } - -SEC_WEBSOCKET_EXTENSI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto 
SEC_WEBSOCKET_EXTENSIO; - case 'o': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENSIO; - default: - return -1; - } - -SEC_WEBSOCKET_EXTENSIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENSION; - case 'n': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_EXTENSION; - default: - return -1; - } - -SEC_WEBSOCKET_EXTENSION: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return 56; - } - goto SEC_WEBSOCKET_EXTENSIONS; - case 's': - if (last) { - return 56; - } - goto SEC_WEBSOCKET_EXTENSIONS; - default: - return -1; - } - -SEC_WEBSOCKET_K: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_KE; - case 'e': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_KE; - default: - return -1; - } - -SEC_WEBSOCKET_KE: - NEXT_CHAR(); - switch (ch) { - case 'Y': - if (last) { - return 57; - } - goto SEC_WEBSOCKET_KEY; - case 'y': - if (last) { - return 57; - } - goto SEC_WEBSOCKET_KEY; - default: - return -1; - } - -SEC_WEBSOCKET_KEY: - NEXT_CHAR(); - switch (ch) { - case '1': - if (last) { - return 58; - } - goto SEC_WEBSOCKET_KEY1; - default: - return -1; - } - -SEC_WEBSOCKET_P: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PR; - case 'r': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PR; - default: - return -1; - } - -SEC_WEBSOCKET_PR: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PRO; - case 'o': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PRO; - default: - return -1; - } - -SEC_WEBSOCKET_PRO: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROT; - case 't': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROT; - default: - return -1; - } - -SEC_WEBSOCKET_PROT: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROTO; - case 'o': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROTO; - default: - return -1; - } - -SEC_WEBSOCKET_PROTO: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROTOC; - case 'c': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROTOC; - default: - return -1; - } - -SEC_WEBSOCKET_PROTOC: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROTOCO; - case 'o': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_PROTOCO; - default: - return -1; - } - -SEC_WEBSOCKET_PROTOCO: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return 59; - } - goto SEC_WEBSOCKET_PROTOCOL; - case 'l': - if (last) { - return 59; - } - goto SEC_WEBSOCKET_PROTOCOL; - default: - return -1; - } - -SEC_WEBSOCKET_V: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VE; - case 'e': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VE; - default: - return -1; - } - -SEC_WEBSOCKET_VE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VER; - case 'r': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VER; - default: - return -1; - } - -SEC_WEBSOCKET_VER: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VERS; - case 's': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VERS; - default: - return -1; - } - -SEC_WEBSOCKET_VERS: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VERSI; - case 'i': - if 
(last) { - return -1; - } - goto SEC_WEBSOCKET_VERSI; - default: - return -1; - } - -SEC_WEBSOCKET_VERSI: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VERSIO; - case 'o': - if (last) { - return -1; - } - goto SEC_WEBSOCKET_VERSIO; - default: - return -1; - } - -SEC_WEBSOCKET_VERSIO: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return 60; - } - goto SEC_WEBSOCKET_VERSION; - case 'n': - if (last) { - return 60; - } - goto SEC_WEBSOCKET_VERSION; - default: - return -1; - } - -SER: - NEXT_CHAR(); - switch (ch) { - case 'V': - if (last) { - return -1; - } - goto SERV; - case 'v': - if (last) { - return -1; - } - goto SERV; - default: - return -1; - } - -SERV: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto SERVE; - case 'e': - if (last) { - return -1; - } - goto SERVE; - default: - return -1; - } - -SERVE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return 61; - } - goto SERVER; - case 'r': - if (last) { - return 61; - } - goto SERVER; - default: - return -1; - } - -SET: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto SET_; - default: - return -1; - } - -SET_: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto SET_C; - case 'c': - if (last) { - return -1; - } - goto SET_C; - default: - return -1; - } - -SET_C: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SET_CO; - case 'o': - if (last) { - return -1; - } - goto SET_CO; - default: - return -1; - } - -SET_CO: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto SET_COO; - case 'o': - if (last) { - return -1; - } - goto SET_COO; - default: - return -1; - } - -SET_COO: - NEXT_CHAR(); - switch (ch) { - case 'K': - if (last) { - return -1; - } - goto SET_COOK; - case 'k': - if (last) { - return -1; - } - goto SET_COOK; - default: - return -1; - } - -SET_COOK: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto SET_COOKI; - case 'i': - if (last) { - return -1; - } - goto SET_COOKI; - default: - return -1; - } - -SET_COOKI: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 62; - } - goto SET_COOKIE; - case 'e': - if (last) { - return 62; - } - goto SET_COOKIE; - default: - return -1; - } - -T: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 63; - } - goto TE; - case 'e': - if (last) { - return 63; - } - goto TE; - case 'R': - if (last) { - return -1; - } - goto TR; - case 'r': - if (last) { - return -1; - } - goto TR; - default: - return -1; - } - -TR: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto TRA; - case 'a': - if (last) { - return -1; - } - goto TRA; - default: - return -1; - } - -TRA: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto TRAI; - case 'i': - if (last) { - return -1; - } - goto TRAI; - case 'N': - if (last) { - return -1; - } - goto TRAN; - case 'n': - if (last) { - return -1; - } - goto TRAN; - default: - return -1; - } - -TRAI: - NEXT_CHAR(); - switch (ch) { - case 'L': - if (last) { - return -1; - } - goto TRAIL; - case 'l': - if (last) { - return -1; - } - goto TRAIL; - default: - return -1; - } - -TRAIL: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto TRAILE; - case 'e': - if (last) { - return -1; - } - goto TRAILE; - default: - return -1; - } - -TRAILE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return 64; - } - goto 
TRAILER; - case 'r': - if (last) { - return 64; - } - goto TRAILER; - default: - return -1; - } - -TRAN: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto TRANS; - case 's': - if (last) { - return -1; - } - goto TRANS; - default: - return -1; - } - -TRANS: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto TRANSF; - case 'f': - if (last) { - return -1; - } - goto TRANSF; - default: - return -1; - } - -TRANSF: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto TRANSFE; - case 'e': - if (last) { - return -1; - } - goto TRANSFE; - default: - return -1; - } - -TRANSFE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto TRANSFER; - case 'r': - if (last) { - return -1; - } - goto TRANSFER; - default: - return -1; - } - -TRANSFER: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto TRANSFER_; - default: - return -1; - } - -TRANSFER_: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto TRANSFER_E; - case 'e': - if (last) { - return -1; - } - goto TRANSFER_E; - default: - return -1; - } - -TRANSFER_E: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto TRANSFER_EN; - case 'n': - if (last) { - return -1; - } - goto TRANSFER_EN; - default: - return -1; - } - -TRANSFER_EN: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto TRANSFER_ENC; - case 'c': - if (last) { - return -1; - } - goto TRANSFER_ENC; - default: - return -1; - } - -TRANSFER_ENC: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto TRANSFER_ENCO; - case 'o': - if (last) { - return -1; - } - goto TRANSFER_ENCO; - default: - return -1; - } - -TRANSFER_ENCO: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto TRANSFER_ENCOD; - case 'd': - if (last) { - return -1; - } - goto TRANSFER_ENCOD; - default: - return -1; - } - -TRANSFER_ENCOD: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto TRANSFER_ENCODI; - case 'i': - if (last) { - return -1; - } - goto TRANSFER_ENCODI; - default: - return -1; - } - -TRANSFER_ENCODI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto TRANSFER_ENCODIN; - case 'n': - if (last) { - return -1; - } - goto TRANSFER_ENCODIN; - default: - return -1; - } - -TRANSFER_ENCODIN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return 65; - } - goto TRANSFER_ENCODING; - case 'g': - if (last) { - return 65; - } - goto TRANSFER_ENCODING; - default: - return -1; - } - -U: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto UR; - case 'r': - if (last) { - return -1; - } - goto UR; - case 'P': - if (last) { - return -1; - } - goto UP; - case 'p': - if (last) { - return -1; - } - goto UP; - case 'S': - if (last) { - return -1; - } - goto US; - case 's': - if (last) { - return -1; - } - goto US; - default: - return -1; - } - -UR: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return 66; - } - goto URI; - case 'i': - if (last) { - return 66; - } - goto URI; - default: - return -1; - } - -UP: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto UPG; - case 'g': - if (last) { - return -1; - } - goto UPG; - default: - return -1; - } - -UPG: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto UPGR; - case 'r': - if (last) { - return -1; - } - goto UPGR; - default: - return -1; - } 
- -UPGR: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto UPGRA; - case 'a': - if (last) { - return -1; - } - goto UPGRA; - default: - return -1; - } - -UPGRA: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto UPGRAD; - case 'd': - if (last) { - return -1; - } - goto UPGRAD; - default: - return -1; - } - -UPGRAD: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 67; - } - goto UPGRADE; - case 'e': - if (last) { - return 67; - } - goto UPGRADE; - default: - return -1; - } - -US: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto USE; - case 'e': - if (last) { - return -1; - } - goto USE; - default: - return -1; - } - -USE: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto USER; - case 'r': - if (last) { - return -1; - } - goto USER; - default: - return -1; - } - -USER: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto USER_; - default: - return -1; - } - -USER_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto USER_A; - case 'a': - if (last) { - return -1; - } - goto USER_A; - default: - return -1; - } - -USER_A: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto USER_AG; - case 'g': - if (last) { - return -1; - } - goto USER_AG; - default: - return -1; - } - -USER_AG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto USER_AGE; - case 'e': - if (last) { - return -1; - } - goto USER_AGE; - default: - return -1; - } - -USER_AGE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto USER_AGEN; - case 'n': - if (last) { - return -1; - } - goto USER_AGEN; - default: - return -1; - } - -USER_AGEN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 68; - } - goto USER_AGENT; - case 't': - if (last) { - return 68; - } - goto USER_AGENT; - default: - return -1; - } - -V: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto VA; - case 'a': - if (last) { - return -1; - } - goto VA; - case 'I': - if (last) { - return -1; - } - goto VI; - case 'i': - if (last) { - return -1; - } - goto VI; - default: - return -1; - } - -VA: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto VAR; - case 'r': - if (last) { - return -1; - } - goto VAR; - default: - return -1; - } - -VAR: - NEXT_CHAR(); - switch (ch) { - case 'Y': - if (last) { - return 69; - } - goto VARY; - case 'y': - if (last) { - return 69; - } - goto VARY; - default: - return -1; - } - -VI: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return 70; - } - goto VIA; - case 'a': - if (last) { - return 70; - } - goto VIA; - default: - return -1; - } - -W: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto WW; - case 'w': - if (last) { - return -1; - } - goto WW; - case 'A': - if (last) { - return -1; - } - goto WA; - case 'a': - if (last) { - return -1; - } - goto WA; - default: - return -1; - } - -WW: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto WWW; - case 'w': - if (last) { - return -1; - } - goto WWW; - default: - return -1; - } - -WWW: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto WWW_; - default: - return -1; - } - -WWW_: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto WWW_A; - case 'a': - if (last) { - return -1; - } - goto WWW_A; - default: - 
return -1; - } - -WWW_A: - NEXT_CHAR(); - switch (ch) { - case 'U': - if (last) { - return -1; - } - goto WWW_AU; - case 'u': - if (last) { - return -1; - } - goto WWW_AU; - default: - return -1; - } - -WWW_AU: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto WWW_AUT; - case 't': - if (last) { - return -1; - } - goto WWW_AUT; - default: - return -1; - } - -WWW_AUT: - NEXT_CHAR(); - switch (ch) { - case 'H': - if (last) { - return -1; - } - goto WWW_AUTH; - case 'h': - if (last) { - return -1; - } - goto WWW_AUTH; - default: - return -1; - } - -WWW_AUTH: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto WWW_AUTHE; - case 'e': - if (last) { - return -1; - } - goto WWW_AUTHE; - default: - return -1; - } - -WWW_AUTHE: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto WWW_AUTHEN; - case 'n': - if (last) { - return -1; - } - goto WWW_AUTHEN; - default: - return -1; - } - -WWW_AUTHEN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto WWW_AUTHENT; - case 't': - if (last) { - return -1; - } - goto WWW_AUTHENT; - default: - return -1; - } - -WWW_AUTHENT: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto WWW_AUTHENTI; - case 'i': - if (last) { - return -1; - } - goto WWW_AUTHENTI; - default: - return -1; - } - -WWW_AUTHENTI: - NEXT_CHAR(); - switch (ch) { - case 'C': - if (last) { - return -1; - } - goto WWW_AUTHENTIC; - case 'c': - if (last) { - return -1; - } - goto WWW_AUTHENTIC; - default: - return -1; - } - -WWW_AUTHENTIC: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto WWW_AUTHENTICA; - case 'a': - if (last) { - return -1; - } - goto WWW_AUTHENTICA; - default: - return -1; - } - -WWW_AUTHENTICA: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto WWW_AUTHENTICAT; - case 't': - if (last) { - return -1; - } - goto WWW_AUTHENTICAT; - default: - return -1; - } - -WWW_AUTHENTICAT: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return 71; - } - goto WWW_AUTHENTICATE; - case 'e': - if (last) { - return 71; - } - goto WWW_AUTHENTICATE; - default: - return -1; - } - -WA: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto WAN; - case 'n': - if (last) { - return -1; - } - goto WAN; - case 'R': - if (last) { - return -1; - } - goto WAR; - case 'r': - if (last) { - return -1; - } - goto WAR; - default: - return -1; - } - -WAN: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto WANT; - case 't': - if (last) { - return -1; - } - goto WANT; - default: - return -1; - } - -WANT: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto WANT_; - default: - return -1; - } - -WANT_: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto WANT_D; - case 'd': - if (last) { - return -1; - } - goto WANT_D; - default: - return -1; - } - -WANT_D: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto WANT_DI; - case 'i': - if (last) { - return -1; - } - goto WANT_DI; - default: - return -1; - } - -WANT_DI: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return -1; - } - goto WANT_DIG; - case 'g': - if (last) { - return -1; - } - goto WANT_DIG; - default: - return -1; - } - -WANT_DIG: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto WANT_DIGE; - case 'e': - if (last) { - return -1; - } - goto WANT_DIGE; - default: - return 
-1; - } - -WANT_DIGE: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto WANT_DIGES; - case 's': - if (last) { - return -1; - } - goto WANT_DIGES; - default: - return -1; - } - -WANT_DIGES: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 72; - } - goto WANT_DIGEST; - case 't': - if (last) { - return 72; - } - goto WANT_DIGEST; - default: - return -1; - } - -WAR: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto WARN; - case 'n': - if (last) { - return -1; - } - goto WARN; - default: - return -1; - } - -WARN: - NEXT_CHAR(); - switch (ch) { - case 'I': - if (last) { - return -1; - } - goto WARNI; - case 'i': - if (last) { - return -1; - } - goto WARNI; - default: - return -1; - } - -WARNI: - NEXT_CHAR(); - switch (ch) { - case 'N': - if (last) { - return -1; - } - goto WARNIN; - case 'n': - if (last) { - return -1; - } - goto WARNIN; - default: - return -1; - } - -WARNIN: - NEXT_CHAR(); - switch (ch) { - case 'G': - if (last) { - return 73; - } - goto WARNING; - case 'g': - if (last) { - return 73; - } - goto WARNING; - default: - return -1; - } - -X: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto X_; - default: - return -1; - } - -X_: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto X_F; - case 'f': - if (last) { - return -1; - } - goto X_F; - default: - return -1; - } - -X_F: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto X_FO; - case 'o': - if (last) { - return -1; - } - goto X_FO; - default: - return -1; - } - -X_FO: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto X_FOR; - case 'r': - if (last) { - return -1; - } - goto X_FOR; - default: - return -1; - } - -X_FOR: - NEXT_CHAR(); - switch (ch) { - case 'W': - if (last) { - return -1; - } - goto X_FORW; - case 'w': - if (last) { - return -1; - } - goto X_FORW; - default: - return -1; - } - -X_FORW: - NEXT_CHAR(); - switch (ch) { - case 'A': - if (last) { - return -1; - } - goto X_FORWA; - case 'a': - if (last) { - return -1; - } - goto X_FORWA; - default: - return -1; - } - -X_FORWA: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto X_FORWAR; - case 'r': - if (last) { - return -1; - } - goto X_FORWAR; - default: - return -1; - } - -X_FORWAR: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto X_FORWARD; - case 'd': - if (last) { - return -1; - } - goto X_FORWARD; - default: - return -1; - } - -X_FORWARD: - NEXT_CHAR(); - switch (ch) { - case 'E': - if (last) { - return -1; - } - goto X_FORWARDE; - case 'e': - if (last) { - return -1; - } - goto X_FORWARDE; - default: - return -1; - } - -X_FORWARDE: - NEXT_CHAR(); - switch (ch) { - case 'D': - if (last) { - return -1; - } - goto X_FORWARDED; - case 'd': - if (last) { - return -1; - } - goto X_FORWARDED; - default: - return -1; - } - -X_FORWARDED: - NEXT_CHAR(); - switch (ch) { - case '-': - if (last) { - return -1; - } - goto X_FORWARDED_; - default: - return -1; - } - -X_FORWARDED_: - NEXT_CHAR(); - switch (ch) { - case 'F': - if (last) { - return -1; - } - goto X_FORWARDED_F; - case 'f': - if (last) { - return -1; - } - goto X_FORWARDED_F; - case 'H': - if (last) { - return -1; - } - goto X_FORWARDED_H; - case 'h': - if (last) { - return -1; - } - goto X_FORWARDED_H; - case 'P': - if (last) { - return -1; - } - goto X_FORWARDED_P; - case 'p': - if (last) { - return -1; - } - goto X_FORWARDED_P; - default: - return -1; - } 
- -X_FORWARDED_F: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto X_FORWARDED_FO; - case 'o': - if (last) { - return -1; - } - goto X_FORWARDED_FO; - default: - return -1; - } - -X_FORWARDED_FO: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return 74; - } - goto X_FORWARDED_FOR; - case 'r': - if (last) { - return 74; - } - goto X_FORWARDED_FOR; - default: - return -1; - } - -X_FORWARDED_H: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto X_FORWARDED_HO; - case 'o': - if (last) { - return -1; - } - goto X_FORWARDED_HO; - default: - return -1; - } - -X_FORWARDED_HO: - NEXT_CHAR(); - switch (ch) { - case 'S': - if (last) { - return -1; - } - goto X_FORWARDED_HOS; - case 's': - if (last) { - return -1; - } - goto X_FORWARDED_HOS; - default: - return -1; - } - -X_FORWARDED_HOS: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return 75; - } - goto X_FORWARDED_HOST; - case 't': - if (last) { - return 75; - } - goto X_FORWARDED_HOST; - default: - return -1; - } - -X_FORWARDED_P: - NEXT_CHAR(); - switch (ch) { - case 'R': - if (last) { - return -1; - } - goto X_FORWARDED_PR; - case 'r': - if (last) { - return -1; - } - goto X_FORWARDED_PR; - default: - return -1; - } - -X_FORWARDED_PR: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return -1; - } - goto X_FORWARDED_PRO; - case 'o': - if (last) { - return -1; - } - goto X_FORWARDED_PRO; - default: - return -1; - } - -X_FORWARDED_PRO: - NEXT_CHAR(); - switch (ch) { - case 'T': - if (last) { - return -1; - } - goto X_FORWARDED_PROT; - case 't': - if (last) { - return -1; - } - goto X_FORWARDED_PROT; - default: - return -1; - } - -X_FORWARDED_PROT: - NEXT_CHAR(); - switch (ch) { - case 'O': - if (last) { - return 76; - } - goto X_FORWARDED_PROTO; - case 'o': - if (last) { - return 76; - } - goto X_FORWARDED_PROTO; - default: - return -1; - } - -ACCEPT_CHARSET: -ACCEPT_ENCODING: -ACCEPT_LANGUAGE: -ACCEPT_RANGES: -ACCESS_CONTROL_ALLOW_CREDENTIALS: -ACCESS_CONTROL_ALLOW_HEADERS: -ACCESS_CONTROL_ALLOW_METHODS: -ACCESS_CONTROL_ALLOW_ORIGIN: -ACCESS_CONTROL_EXPOSE_HEADERS: -ACCESS_CONTROL_MAX_AGE: -ACCESS_CONTROL_REQUEST_HEADERS: -ACCESS_CONTROL_REQUEST_METHOD: -AGE: -ALLOW: -AUTHORIZATION: -CACHE_CONTROL: -CONNECTION: -CONTENT_DISPOSITION: -CONTENT_ENCODING: -CONTENT_LANGUAGE: -CONTENT_LENGTH: -CONTENT_LOCATION: -CONTENT_MD5: -CONTENT_RANGE: -CONTENT_TRANSFER_ENCODING: -CONTENT_TYPE: -COOKIE: -DATE: -DESTINATION: -DIGEST: -ETAG: -EXPECT: -EXPIRES: -FORWARDED: -FROM: -HOST: -IF_MATCH: -IF_MODIFIED_SINCE: -IF_NONE_MATCH: -IF_RANGE: -IF_UNMODIFIED_SINCE: -KEEP_ALIVE: -LAST_EVENT_ID: -LAST_MODIFIED: -LINK: -LOCATION: -MAX_FORWARDS: -ORIGIN: -PRAGMA: -PROXY_AUTHENTICATE: -PROXY_AUTHORIZATION: -RANGE: -REFERER: -RETRY_AFTER: -SEC_WEBSOCKET_ACCEPT: -SEC_WEBSOCKET_EXTENSIONS: -SEC_WEBSOCKET_KEY1: -SEC_WEBSOCKET_PROTOCOL: -SEC_WEBSOCKET_VERSION: -SERVER: -SET_COOKIE: -TE: -TRAILER: -TRANSFER_ENCODING: -UPGRADE: -URI: -USER_AGENT: -VARY: -VIA: -WANT_DIGEST: -WARNING: -WWW_AUTHENTICATE: -X_FORWARDED_FOR: -X_FORWARDED_HOST: -X_FORWARDED_PROTO: -missing: - /* nothing found */ - return -1; -} diff --git a/third_party/python/aiohttp/aiohttp/_find_header.h b/third_party/python/aiohttp/aiohttp/_find_header.h deleted file mode 100644 index 99b7b4f82821..000000000000 --- a/third_party/python/aiohttp/aiohttp/_find_header.h +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef _FIND_HEADERS_H -#define _FIND_HEADERS_H - -#ifdef __cplusplus -extern "C" { -#endif - -int find_header(const char 
*str, int size); - - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/aiohttp/aiohttp/_find_header.pxd b/third_party/python/aiohttp/aiohttp/_find_header.pxd deleted file mode 100644 index 37a6c37268ee..000000000000 --- a/third_party/python/aiohttp/aiohttp/_find_header.pxd +++ /dev/null @@ -1,2 +0,0 @@ -cdef extern from "_find_header.h": - int find_header(char *, int) diff --git a/third_party/python/aiohttp/aiohttp/_frozenlist.c b/third_party/python/aiohttp/aiohttp/_frozenlist.c deleted file mode 100644 index 4a9d38237f28..000000000000 --- a/third_party/python/aiohttp/aiohttp/_frozenlist.c +++ /dev/null @@ -1,7512 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. -#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef 
CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - 
#elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef 
Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__aiohttp___frozenlist -#define __PYX_HAVE_API__aiohttp___frozenlist -/* Early includes */ -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define 
__Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
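The block above ends the compiler-detection section that defines likely()/unlikely() as wrappers around GCC's __builtin_expect branch-prediction hint (falling back to plain expressions on other compilers). A minimal standalone sketch of that same pattern, assuming a GCC- or Clang-compatible toolchain; this is illustrative only and not part of the vendored _frozenlist.c being removed:

/* Sketch of the likely()/unlikely() pattern, assuming GCC/Clang.
 * Illustrative only; not taken from aiohttp/_frozenlist.c. */
#include <stdio.h>

#if defined(__GNUC__)
  #define likely(x)   __builtin_expect(!!(x), 1)
  #define unlikely(x) __builtin_expect(!!(x), 0)
#else
  #define likely(x)   (x)
  #define unlikely(x) (x)
#endif

/* Hint that the error branch is rarely taken, so the compiler can lay out
 * the common path fall-through and keep the cold path out of the way. */
static int checked_div(int a, int b, int *out) {
    if (unlikely(b == 0)) {
        return -1;              /* cold path */
    }
    *out = a / b;               /* hot path */
    return 0;
}

int main(void) {
    int q;
    if (likely(checked_div(10, 2, &q) == 0))
        printf("%d\n", q);
    return 0;
}

The generated module uses the same hints throughout (for example around error checks such as `if (unlikely(!__pyx_t_1)) __PYX_ERR(...)`) so that exception paths stay off the hot path.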
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "aiohttp/_frozenlist.pyx", - "stringsource", -}; - -/*--- Type declarations ---*/ -struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList; - -/* "aiohttp/_frozenlist.pyx":4 - * - * - * cdef class FrozenList: # <<<<<<<<<<<<<< - * - * cdef readonly bint frozen - */ -struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList { - PyObject_HEAD - struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *__pyx_vtab; - int frozen; - PyObject *_items; -}; - - - -struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList { - PyObject *(*_check_frozen)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *); - PyObject *(*_fast_len)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *); -}; -static struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *__pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList; -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *); - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define 
__Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define 
__Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* ObjectGetItem.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key); -#else -#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallNoArg.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); -#else -#define 
__Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) -#endif - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* PyIntCompare.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, long inplace); - -/* PySequenceContains.proto */ -static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { - int result = PySequence_Contains(seq, item); - return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); -} - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod1.proto */ -static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); - -/* pop_index.proto */ -static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix); -static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix); -#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS -static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix); -#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ - (likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ?\ - __Pyx__PyList_PopIndex(L, py_ix, ix) : (\ - (unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ - __Pyx__PyObject_PopIndex(L, py_ix))) -#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ - __Pyx_fits_Py_ssize_t(ix, type, is_signed) ?\ - __Pyx__PyList_PopIndex(L, py_ix, ix) : (\ - (unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ - __Pyx__PyObject_PopIndex(L, py_ix))) -#else -#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)\ - __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) -#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ - (unlikely((py_ix) == Py_None)) ? 
__Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ - __Pyx__PyObject_PopIndex(L, py_ix)) -#endif - -/* ListAppend.proto */ -#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS -static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { - PyListObject* L = (PyListObject*) list; - Py_ssize_t len = Py_SIZE(list); - if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { - Py_INCREF(x); - PyList_SET_ITEM(list, len, x); - __Pyx_SET_SIZE(list, len + 1); - return 0; - } - return PyList_Append(list, x); -} -#else -#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) -#endif - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable); - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* SetupReduce.proto */ -static int __Pyx_setup_reduce(PyObject* type_obj); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -static PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto*/ -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto*/ - -/* Module declarations from 'aiohttp._frozenlist' */ -static PyTypeObject *__pyx_ptype_7aiohttp_11_frozenlist_FrozenList = 0; -static PyObject *__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *, PyObject *); /*proto*/ -#define __Pyx_MODULE_NAME "aiohttp._frozenlist" -extern int __pyx_module_is_main_aiohttp___frozenlist; -int __pyx_module_is_main_aiohttp___frozenlist = 0; - -/* Implementation of 'aiohttp._frozenlist' */ -static PyObject *__pyx_builtin_RuntimeError; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_pop[] = "pop"; -static const char __pyx_k_pos[] = "pos"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_item[] = "item"; -static const char __pyx_k_iter[] = "__iter__"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_clear[] = "clear"; -static const char __pyx_k_count[] = "count"; -static const char __pyx_k_index[] = "index"; -static 
const char __pyx_k_items[] = "items"; -static const char __pyx_k_format[] = "format"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_remove[] = "remove"; -static const char __pyx_k_update[] = "update"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_register[] = "register"; -static const char __pyx_k_reversed[] = "__reversed__"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_FrozenList[] = "FrozenList"; -static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_RuntimeError[] = "RuntimeError"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_stringsource[] = "stringsource"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_MutableSequence[] = "MutableSequence"; -static const char __pyx_k_collections_abc[] = "collections.abc"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_FrozenList_frozen_r[] = ""; -static const char __pyx_k_aiohttp__frozenlist[] = "aiohttp._frozenlist"; -static const char __pyx_k_pyx_unpickle_FrozenList[] = "__pyx_unpickle_FrozenList"; -static const char __pyx_k_Cannot_modify_frozen_list[] = "Cannot modify frozen list."; -static const char __pyx_k_Incompatible_checksums_s_vs_0x94[] = "Incompatible checksums (%s vs 0x949a143 = (_items, frozen))"; -static PyObject *__pyx_kp_u_Cannot_modify_frozen_list; -static PyObject *__pyx_n_s_FrozenList; -static PyObject *__pyx_kp_u_FrozenList_frozen_r; -static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x94; -static PyObject *__pyx_n_s_MutableSequence; -static PyObject *__pyx_n_s_PickleError; -static PyObject *__pyx_n_s_RuntimeError; -static PyObject *__pyx_n_s_aiohttp__frozenlist; -static PyObject *__pyx_n_s_clear; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_collections_abc; -static PyObject *__pyx_n_s_count; -static PyObject *__pyx_n_s_dict; -static PyObject *__pyx_n_s_format; -static PyObject *__pyx_n_s_getstate; -static PyObject *__pyx_n_s_import; -static PyObject *__pyx_n_s_index; -static PyObject *__pyx_n_s_item; -static PyObject *__pyx_n_s_items; -static PyObject *__pyx_n_s_iter; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_new; -static PyObject *__pyx_n_s_pickle; -static PyObject *__pyx_n_s_pop; -static PyObject *__pyx_n_s_pos; -static PyObject *__pyx_n_s_pyx_PickleError; -static PyObject *__pyx_n_s_pyx_checksum; -static PyObject *__pyx_n_s_pyx_result; -static PyObject *__pyx_n_s_pyx_state; -static PyObject *__pyx_n_s_pyx_type; -static PyObject *__pyx_n_s_pyx_unpickle_FrozenList; -static PyObject *__pyx_n_s_pyx_vtable; -static PyObject *__pyx_n_s_reduce; -static PyObject *__pyx_n_s_reduce_cython; -static PyObject *__pyx_n_s_reduce_ex; -static PyObject *__pyx_n_s_register; -static PyObject *__pyx_n_s_remove; -static PyObject *__pyx_n_s_reversed; -static PyObject 
*__pyx_n_s_setstate; -static PyObject *__pyx_n_s_setstate_cython; -static PyObject *__pyx_kp_s_stringsource; -static PyObject *__pyx_n_s_test; -static PyObject *__pyx_n_s_update; -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /* proto */ -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ -static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_pos, PyObject *__pyx_v_item); /* proto */ -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct 
__pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_7aiohttp_11_frozenlist_FrozenList(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_int_0; -static PyObject *__pyx_int_1; -static PyObject *__pyx_int_2; -static PyObject *__pyx_int_3; -static PyObject *__pyx_int_4; -static PyObject *__pyx_int_5; -static PyObject *__pyx_int_155820355; -static PyObject *__pyx_int_neg_1; -static PyObject *__pyx_tuple_; -static PyObject *__pyx_tuple__2; -static PyObject *__pyx_codeobj__3; -/* Late includes */ - -/* "aiohttp/_frozenlist.pyx":9 - * cdef list _items - * - * def __init__(self, items=None): # <<<<<<<<<<<<<< - * self.frozen = False - * if items is not None: - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_items = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_items,0}; - PyObject* values[1] = {0}; - values[0] = ((PyObject *)Py_None); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_items); - if (value) { values[0] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_items = values[0]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - 
return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_items); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - __Pyx_INCREF(__pyx_v_items); - - /* "aiohttp/_frozenlist.pyx":10 - * - * def __init__(self, items=None): - * self.frozen = False # <<<<<<<<<<<<<< - * if items is not None: - * items = list(items) - */ - __pyx_v_self->frozen = 0; - - /* "aiohttp/_frozenlist.pyx":11 - * def __init__(self, items=None): - * self.frozen = False - * if items is not None: # <<<<<<<<<<<<<< - * items = list(items) - * else: - */ - __pyx_t_1 = (__pyx_v_items != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":12 - * self.frozen = False - * if items is not None: - * items = list(items) # <<<<<<<<<<<<<< - * else: - * items = [] - */ - __pyx_t_3 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_items, __pyx_t_3); - __pyx_t_3 = 0; - - /* "aiohttp/_frozenlist.pyx":11 - * def __init__(self, items=None): - * self.frozen = False - * if items is not None: # <<<<<<<<<<<<<< - * items = list(items) - * else: - */ - goto __pyx_L3; - } - - /* "aiohttp/_frozenlist.pyx":14 - * items = list(items) - * else: - * items = [] # <<<<<<<<<<<<<< - * self._items = items - * - */ - /*else*/ { - __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_items, __pyx_t_3); - __pyx_t_3 = 0; - } - __pyx_L3:; - - /* "aiohttp/_frozenlist.pyx":15 - * else: - * items = [] - * self._items = items # <<<<<<<<<<<<<< - * - * cdef object _check_frozen(self): - */ - if (!(likely(PyList_CheckExact(__pyx_v_items))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_items)->tp_name), 0))) __PYX_ERR(0, 15, __pyx_L1_error) - __pyx_t_3 = __pyx_v_items; - __Pyx_INCREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->_items); - __Pyx_DECREF(__pyx_v_self->_items); - __pyx_v_self->_items = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "aiohttp/_frozenlist.pyx":9 - * cdef list _items - * - * def __init__(self, items=None): # <<<<<<<<<<<<<< - * self.frozen = False - * if items is not None: - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_items); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":17 - * self._items = items - * - * cdef object _check_frozen(self): # <<<<<<<<<<<<<< - * if self.frozen: - * raise RuntimeError("Cannot modify frozen list.") - */ - -static PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int 
__pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_check_frozen", 0); - - /* "aiohttp/_frozenlist.pyx":18 - * - * cdef object _check_frozen(self): - * if self.frozen: # <<<<<<<<<<<<<< - * raise RuntimeError("Cannot modify frozen list.") - * - */ - __pyx_t_1 = (__pyx_v_self->frozen != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_frozenlist.pyx":19 - * cdef object _check_frozen(self): - * if self.frozen: - * raise RuntimeError("Cannot modify frozen list.") # <<<<<<<<<<<<<< - * - * cdef inline object _fast_len(self): - */ - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __PYX_ERR(0, 19, __pyx_L1_error) - - /* "aiohttp/_frozenlist.pyx":18 - * - * cdef object _check_frozen(self): - * if self.frozen: # <<<<<<<<<<<<<< - * raise RuntimeError("Cannot modify frozen list.") - * - */ - } - - /* "aiohttp/_frozenlist.pyx":17 - * self._items = items - * - * cdef object _check_frozen(self): # <<<<<<<<<<<<<< - * if self.frozen: - * raise RuntimeError("Cannot modify frozen list.") - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList._check_frozen", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":21 - * raise RuntimeError("Cannot modify frozen list.") - * - * cdef inline object _fast_len(self): # <<<<<<<<<<<<<< - * return len(self._items) - * - */ - -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_fast_len", 0); - - /* "aiohttp/_frozenlist.pyx":22 - * - * cdef inline object _fast_len(self): - * return len(self._items) # <<<<<<<<<<<<<< - * - * def freeze(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_v_self->_items; - __Pyx_INCREF(__pyx_t_1); - if (unlikely(__pyx_t_1 == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 22, __pyx_L1_error) - } - __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyInt_FromSsize_t(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":21 - * raise RuntimeError("Cannot modify frozen list.") - * - * cdef inline object _fast_len(self): # <<<<<<<<<<<<<< - * return len(self._items) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList._fast_len", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":24 - * return len(self._items) - * - * def 
freeze(self): # <<<<<<<<<<<<<< - * self.frozen = True - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("freeze (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("freeze", 0); - - /* "aiohttp/_frozenlist.pyx":25 - * - * def freeze(self): - * self.frozen = True # <<<<<<<<<<<<<< - * - * def __getitem__(self, index): - */ - __pyx_v_self->frozen = 1; - - /* "aiohttp/_frozenlist.pyx":24 - * return len(self._items) - * - * def freeze(self): # <<<<<<<<<<<<<< - * self.frozen = True - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":27 - * self.frozen = True - * - * def __getitem__(self, index): # <<<<<<<<<<<<<< - * return self._items[index] - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__getitem__", 0); - - /* "aiohttp/_frozenlist.pyx":28 - * - * def __getitem__(self, index): - * return self._items[index] # <<<<<<<<<<<<<< - * - * def __setitem__(self, index, value): - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 28, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->_items, __pyx_v_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":27 - * self.frozen = True - * - * def __getitem__(self, index): # <<<<<<<<<<<<<< - * return self._items[index] - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - 
__Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":30 - * return self._items[index] - * - * def __setitem__(self, index, value): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items[index] = value - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setitem__", 0); - - /* "aiohttp/_frozenlist.pyx":31 - * - * def __setitem__(self, index, value): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items[index] = value - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 31, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":32 - * def __setitem__(self, index, value): - * self._check_frozen() - * self._items[index] = value # <<<<<<<<<<<<<< - * - * def __delitem__(self, index): - */ - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 32, __pyx_L1_error) - } - if (unlikely(PyObject_SetItem(__pyx_v_self->_items, __pyx_v_index, __pyx_v_value) < 0)) __PYX_ERR(0, 32, __pyx_L1_error) - - /* "aiohttp/_frozenlist.pyx":30 - * return self._items[index] - * - * def __setitem__(self, index, value): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items[index] = value - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":34 - * self._items[index] = value - * - * def __delitem__(self, index): # <<<<<<<<<<<<<< - * self._check_frozen() - * del self._items[index] - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index)); - - /* function exit code */ - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__delitem__", 0); - - /* "aiohttp/_frozenlist.pyx":35 - * - * def __delitem__(self, index): - * self._check_frozen() # <<<<<<<<<<<<<< - * del self._items[index] - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":36 - * def __delitem__(self, index): - * self._check_frozen() - * del self._items[index] # <<<<<<<<<<<<<< - * - * def __len__(self): - */ - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 36, __pyx_L1_error) - } - if (unlikely(PyObject_DelItem(__pyx_v_self->_items, __pyx_v_index) < 0)) __PYX_ERR(0, 36, __pyx_L1_error) - - /* "aiohttp/_frozenlist.pyx":34 - * self._items[index] = value - * - * def __delitem__(self, index): # <<<<<<<<<<<<<< - * self._check_frozen() - * del self._items[index] - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":38 - * del self._items[index] - * - * def __len__(self): # <<<<<<<<<<<<<< - * return self._fast_len() - * - */ - -/* Python wrapper */ -static Py_ssize_t __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__(PyObject *__pyx_v_self); /*proto*/ -static Py_ssize_t __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__(PyObject *__pyx_v_self) { - Py_ssize_t __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - Py_ssize_t __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__len__", 0); - - /* "aiohttp/_frozenlist.pyx":39 - * - * def __len__(self): - * return self._fast_len() # <<<<<<<<<<<<<< - * - * def __iter__(self): - */ - __pyx_t_1 = __pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_t_1); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 39, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":38 - * del self._items[index] - * - * def __len__(self): # <<<<<<<<<<<<<< - * return self._fast_len() - * - */ - - /* function exit 
code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":41 - * return self._fast_len() - * - * def __iter__(self): # <<<<<<<<<<<<<< - * return self._items.__iter__() - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iter__", 0); - - /* "aiohttp/_frozenlist.pyx":42 - * - * def __iter__(self): - * return self._items.__iter__() # <<<<<<<<<<<<<< - * - * def __reversed__(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_iter); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 42, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":41 - * return self._fast_len() - * - * def __iter__(self): # <<<<<<<<<<<<<< - * return self._items.__iter__() - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":44 - * return self._items.__iter__() - * - * def __reversed__(self): # <<<<<<<<<<<<<< - * return self._items.__reversed__() - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reversed__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reversed__", 0); - - /* "aiohttp/_frozenlist.pyx":45 - * - * def __reversed__(self): - * return self._items.__reversed__() # <<<<<<<<<<<<<< - * - * def __richcmp__(self, other, op): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_reversed); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":44 - * return self._items.__iter__() - * - * def __reversed__(self): # <<<<<<<<<<<<<< - * return self._items.__reversed__() - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__reversed__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":47 - * return self._items.__reversed__() - * - * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< - * if op == 0: # < - * return list(self) < other - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op) { - PyObject *__pyx_v_op = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__richcmp__ (wrapper)", 0); - __pyx_v_op = __Pyx_PyInt_From_int(__pyx_arg_op); if (unlikely(!__pyx_v_op)) __PYX_ERR(0, 47, __pyx_L3_error) - __Pyx_GOTREF(__pyx_v_op); - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_other), ((PyObject *)__pyx_v_op)); - - /* function exit code */ - __Pyx_XDECREF(__pyx_v_op); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__richcmp__", 0); - - /* "aiohttp/_frozenlist.pyx":48 - * - * def __richcmp__(self, other, op): - * if op == 0: # < # <<<<<<<<<<<<<< - * return list(self) < other - * if op == 1: # <= - */ - __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":49 - * def __richcmp__(self, other, op): - * if op == 0: # < - * return list(self) < other # <<<<<<<<<<<<<< - * if op == 1: # <= - * return list(self) <= other - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 49, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 49, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":48 - * - * def __richcmp__(self, other, op): - * if op == 0: # < # <<<<<<<<<<<<<< - * return list(self) < other - * if op == 1: # <= - */ - } - - /* "aiohttp/_frozenlist.pyx":50 - * if op == 0: # < - * return list(self) < other - * if op == 1: # <= # <<<<<<<<<<<<<< - * return list(self) <= other - * if op == 2: # == - */ - __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 50, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 50, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":51 - * return list(self) < other - * if op == 1: # <= - * return list(self) <= other # <<<<<<<<<<<<<< - * if op == 2: # == - * return list(self) == other - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 51, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_LE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 51, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":50 - * if op == 0: # < - * return list(self) < other - * if op == 1: # <= # <<<<<<<<<<<<<< - * return list(self) <= other - * if op == 2: # == - */ - } - - /* "aiohttp/_frozenlist.pyx":52 - * if op == 1: # <= - * return list(self) <= other - * if op == 2: # == # <<<<<<<<<<<<<< - * return list(self) == other - * if op == 3: # != - */ - __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 52, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":53 - * return list(self) <= other - * if op == 2: # == - * return list(self) == other # <<<<<<<<<<<<<< - * if op == 3: # != - * return list(self) != other - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":52 - * if op == 1: # <= - * return list(self) <= other - * if op == 2: # == # <<<<<<<<<<<<<< - * return list(self) == other - * if op == 3: # != - */ - } - - /* "aiohttp/_frozenlist.pyx":54 - * if op == 2: # == - * return list(self) == other - * if op == 3: # != # <<<<<<<<<<<<<< - * return list(self) != other - * if op == 4: # > - */ - __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_3, 3, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 54, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 54, __pyx_L1_error) - 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":55 - * return list(self) == other - * if op == 3: # != - * return list(self) != other # <<<<<<<<<<<<<< - * if op == 4: # > - * return list(self) > other - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 55, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":54 - * if op == 2: # == - * return list(self) == other - * if op == 3: # != # <<<<<<<<<<<<<< - * return list(self) != other - * if op == 4: # > - */ - } - - /* "aiohttp/_frozenlist.pyx":56 - * if op == 3: # != - * return list(self) != other - * if op == 4: # > # <<<<<<<<<<<<<< - * return list(self) > other - * if op == 5: # => - */ - __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_4, 4, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":57 - * return list(self) != other - * if op == 4: # > - * return list(self) > other # <<<<<<<<<<<<<< - * if op == 5: # => - * return list(self) >= other - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 57, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":56 - * if op == 3: # != - * return list(self) != other - * if op == 4: # > # <<<<<<<<<<<<<< - * return list(self) > other - * if op == 5: # => - */ - } - - /* "aiohttp/_frozenlist.pyx":58 - * if op == 4: # > - * return list(self) > other - * if op == 5: # => # <<<<<<<<<<<<<< - * return list(self) >= other - * - */ - __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_5, 5, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 58, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 58, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "aiohttp/_frozenlist.pyx":59 - * return list(self) > other - * if op == 5: # => - * return list(self) >= other # <<<<<<<<<<<<<< - * - * def insert(self, pos, item): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = PySequence_List(((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 59, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":58 - * if op == 4: # > - * return list(self) > other - * if op == 5: # => # <<<<<<<<<<<<<< - * return list(self) >= other - * - */ - } - - /* "aiohttp/_frozenlist.pyx":47 - * return self._items.__reversed__() - * - * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< - * 
if op == 0: # < - * return list(self) < other - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":61 - * return list(self) >= other - * - * def insert(self, pos, item): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.insert(pos, item) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_pos = 0; - PyObject *__pyx_v_item = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("insert (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pos,&__pyx_n_s_item,0}; - PyObject* values[2] = {0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pos)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, 1); __PYX_ERR(0, 61, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "insert") < 0)) __PYX_ERR(0, 61, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - } - __pyx_v_pos = values[0]; - __pyx_v_item = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 61, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_pos, __pyx_v_item); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_pos, PyObject *__pyx_v_item) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno 
= 0; - __Pyx_RefNannySetupContext("insert", 0); - - /* "aiohttp/_frozenlist.pyx":62 - * - * def insert(self, pos, item): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items.insert(pos, item) - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":63 - * def insert(self, pos, item): - * self._check_frozen() - * self._items.insert(pos, item) # <<<<<<<<<<<<<< - * - * def __contains__(self, item): - */ - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "insert"); - __PYX_ERR(0, 63, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_v_pos); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 63, __pyx_L1_error) - __pyx_t_3 = PyList_Insert(__pyx_v_self->_items, __pyx_t_2, __pyx_v_item); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 63, __pyx_L1_error) - - /* "aiohttp/_frozenlist.pyx":61 - * return list(self) >= other - * - * def insert(self, pos, item): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.insert(pos, item) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":65 - * self._items.insert(pos, item) - * - * def __contains__(self, item): # <<<<<<<<<<<<<< - * return item in self._items - * - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ -static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__contains__", 0); - - /* "aiohttp/_frozenlist.pyx":66 - * - * def __contains__(self, item): - * return item in self._items # <<<<<<<<<<<<<< - * - * def __iadd__(self, items): - */ - __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_item, __pyx_v_self->_items, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 66, __pyx_L1_error) - __pyx_r = __pyx_t_1; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":65 - * self._items.insert(pos, item) - * - * def __contains__(self, item): # <<<<<<<<<<<<<< - * return item in self._items - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":68 - * return item in self._items - * - * def __iadd__(self, items): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items += list(items) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_items); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_items) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iadd__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_items)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iadd__", 0); - - /* "aiohttp/_frozenlist.pyx":69 - * - * def __iadd__(self, items): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items += list(items) - * return self - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":70 - * def __iadd__(self, items): - * self._check_frozen() - * self._items += list(items) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_1 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_items, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_items); - __Pyx_DECREF(__pyx_v_self->_items); - __pyx_v_self->_items = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_frozenlist.pyx":71 - * self._check_frozen() - * self._items += list(items) - * return self # <<<<<<<<<<<<<< - * - * def index(self, item): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject *)__pyx_v_self)); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":68 - * return item in self._items - * - * def __iadd__(self, items): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items += list(items) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__iadd__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":73 - * return self - * - * def index(self, item): # <<<<<<<<<<<<<< - * return self._items.index(item) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index(PyObject 
*__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("index (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("index", 0); - - /* "aiohttp/_frozenlist.pyx":74 - * - * def index(self, item): - * return self._items.index(item) # <<<<<<<<<<<<<< - * - * def remove(self, item): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":73 - * return self - * - * def index(self, item): # <<<<<<<<<<<<<< - * return self._items.index(item) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.index", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":76 - * return self._items.index(item) - * - * def remove(self, item): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.remove(item) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("remove (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - 
int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("remove", 0); - - /* "aiohttp/_frozenlist.pyx":77 - * - * def remove(self, item): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items.remove(item) - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":78 - * def remove(self, item): - * self._check_frozen() - * self._items.remove(item) # <<<<<<<<<<<<<< - * - * def clear(self): - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_remove); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 78, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":76 - * return self._items.index(item) - * - * def remove(self, item): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.remove(item) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":80 - * self._items.remove(item) - * - * def clear(self): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.clear() - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("clear (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("clear", 0); - - /* "aiohttp/_frozenlist.pyx":81 - * - * def clear(self): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items.clear() - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":82 - * def clear(self): - * self._check_frozen() - * self._items.clear() # <<<<<<<<<<<<<< - * - * def extend(self, items): - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_clear); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 82, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":80 - * self._items.remove(item) - * - * def clear(self): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.clear() - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":84 - * self._items.clear() - * - * def extend(self, items): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items += list(items) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend(PyObject *__pyx_v_self, PyObject *__pyx_v_items); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend(PyObject *__pyx_v_self, PyObject *__pyx_v_items) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("extend (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_items)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("extend", 0); - - /* "aiohttp/_frozenlist.pyx":85 - * - * def extend(self, items): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items += list(items) - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 85, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":86 - * def extend(self, items): - * self._check_frozen() - * self._items += list(items) # <<<<<<<<<<<<<< - * - * def reverse(self): - */ - __pyx_t_1 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_items, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_items); - __Pyx_DECREF(__pyx_v_self->_items); - __pyx_v_self->_items = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_frozenlist.pyx":84 - * self._items.clear() - * - * def extend(self, items): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items += list(items) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.extend", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":88 - * self._items += list(items) - * - * def reverse(self): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.reverse() - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("reverse (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("reverse", 0); - - /* "aiohttp/_frozenlist.pyx":89 - * - * def reverse(self): - * self._check_frozen() # <<<<<<<<<<<<<< - * self._items.reverse() - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":90 - * def reverse(self): - * self._check_frozen() - * self._items.reverse() # <<<<<<<<<<<<<< - * - * def pop(self, index=-1): - */ - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "reverse"); - __PYX_ERR(0, 90, __pyx_L1_error) - } - __pyx_t_2 = PyList_Reverse(__pyx_v_self->_items); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 90, __pyx_L1_error) - - /* "aiohttp/_frozenlist.pyx":88 - * self._items += list(items) - * - * def reverse(self): # <<<<<<<<<<<<<< - * self._check_frozen() - * self._items.reverse() - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.reverse", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* 
"aiohttp/_frozenlist.pyx":92 - * self._items.reverse() - * - * def pop(self, index=-1): # <<<<<<<<<<<<<< - * self._check_frozen() - * return self._items.pop(index) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_index = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pop (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_index,0}; - PyObject* values[1] = {0}; - values[0] = ((PyObject *)__pyx_int_neg_1); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_index); - if (value) { values[0] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "pop") < 0)) __PYX_ERR(0, 92, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_index = values[0]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("pop", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 92, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_index); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("pop", 0); - - /* "aiohttp/_frozenlist.pyx":93 - * - * def pop(self, index=-1): - * self._check_frozen() # <<<<<<<<<<<<<< - * return self._items.pop(index) - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":94 - * def pop(self, index=-1): - * self._check_frozen() - * return self._items.pop(index) # <<<<<<<<<<<<<< - * - * def append(self, item): - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "pop"); - __PYX_ERR(0, 94, __pyx_L1_error) - 
} - __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_v_index); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 94, __pyx_L1_error) - __pyx_t_1 = __Pyx_PyList_PopIndex(__pyx_v_self->_items, __pyx_v_index, __pyx_t_2, 1, Py_ssize_t, PyInt_FromSsize_t); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 94, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":92 - * self._items.reverse() - * - * def pop(self, index=-1): # <<<<<<<<<<<<<< - * self._check_frozen() - * return self._items.pop(index) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":96 - * return self._items.pop(index) - * - * def append(self, item): # <<<<<<<<<<<<<< - * self._check_frozen() - * return self._items.append(item) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("append (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("append", 0); - - /* "aiohttp/_frozenlist.pyx":97 - * - * def append(self, item): - * self._check_frozen() # <<<<<<<<<<<<<< - * return self._items.append(item) - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_frozenlist.pyx":98 - * def append(self, item): - * self._check_frozen() - * return self._items.append(item) # <<<<<<<<<<<<<< - * - * def count(self, item): - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_items == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 98, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_self->_items, __pyx_v_item); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 98, __pyx_L1_error) - __pyx_t_1 = __Pyx_Owned_Py_None(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 98, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":96 - * return self._items.pop(index) - * - * def append(self, item): # <<<<<<<<<<<<<< - * self._check_frozen() - * return self._items.append(item) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - 
__Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.append", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":100 - * return self._items.append(item) - * - * def count(self, item): # <<<<<<<<<<<<<< - * return self._items.count(item) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("count (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("count", 0); - - /* "aiohttp/_frozenlist.pyx":101 - * - * def count(self, item): - * return self._items.count(item) # <<<<<<<<<<<<<< - * - * def __repr__(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_count); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":100 - * return self._items.append(item) - * - * def count(self, item): # <<<<<<<<<<<<<< - * return self._items.count(item) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.count", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":103 - * return self._items.count(item) - * - * def __repr__(self): # <<<<<<<<<<<<<< - * return ''.format(self.frozen, - * self._items) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__repr__", 0); - - /* "aiohttp/_frozenlist.pyx":104 - * - * def __repr__(self): - * return ''.format(self.frozen, # <<<<<<<<<<<<<< - * self._items) - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_FrozenList_frozen_r, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - - /* "aiohttp/_frozenlist.pyx":105 - * def __repr__(self): - * return ''.format(self.frozen, - * self._items) # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_2)) { - PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_self->_items}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if 
(__Pyx_PyFastCFunction_Check(__pyx_t_2)) { - PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_self->_items}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } else - #endif - { - __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - if (__pyx_t_4) { - __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; - } - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_3); - __Pyx_INCREF(__pyx_v_self->_items); - __Pyx_GIVEREF(__pyx_v_self->_items); - PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_self->_items); - __pyx_t_3 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_frozenlist.pyx":103 - * return self._items.count(item) - * - * def __repr__(self): # <<<<<<<<<<<<<< - * return ''.format(self.frozen, - * self._items) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_frozenlist.pyx":6 - * cdef class FrozenList: - * - * cdef readonly bint frozen # <<<<<<<<<<<<<< - * cdef list _items - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.frozen.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject 
*__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self._items, self.frozen) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v_self->_items); - __Pyx_GIVEREF(__pyx_v_self->_items); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->_items); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_v_state = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self._items, self.frozen) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v__dict = __pyx_t_2; - __pyx_t_2 = 0; - - /* "(tree fragment)":7 - * state = (self._items, self.frozen) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_3 = (__pyx_v__dict != Py_None); - __pyx_t_4 = (__pyx_t_3 != 0); - if (__pyx_t_4) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); - __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); - __pyx_t_1 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self._items is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * 
state = (self._items, self.frozen) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self._items is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state - */ - /*else*/ { - __pyx_t_4 = (__pyx_v_self->_items != ((PyObject*)Py_None)); - __pyx_v_use_setstate = __pyx_t_4; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._items is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state - * else: - */ - __pyx_t_4 = (__pyx_v_use_setstate != 0); - if (__pyx_t_4) { - - /* "(tree fragment)":13 - * use_setstate = self._items is not None - * if use_setstate: - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle_FrozenList); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_155820355); - __Pyx_GIVEREF(__pyx_int_155820355); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_155820355); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._items is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state - * else: - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_FrozenList); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_155820355); - __Pyx_GIVEREF(__pyx_int_155820355); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_155820355); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - 
PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); - __pyx_t_5 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":17 - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - 
__pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList = {"__pyx_unpickle_FrozenList", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - PyObject* values[3] = {0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_FrozenList") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - 
__pyx_r = __pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x949a143: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) - */ - __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x949a143) != 0); - if (__pyx_t_1) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum != 0x949a143: - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) - * __pyx_result = FrozenList.__new__(__pyx_type) - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_t_2); - __pyx_v___pyx_PickleError = __pyx_t_2; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum != 0x949a143: - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) # <<<<<<<<<<<<<< - * __pyx_result = FrozenList.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_INCREF(__pyx_v___pyx_PickleError); - __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x949a143: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) - * __pyx_result = FrozenList.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v___pyx_result = __pyx_t_3; - __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) - * __pyx_result = FrozenList.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_1 = (__pyx_v___pyx_state != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "(tree fragment)":9 - * __pyx_result = FrozenList.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_3 = __pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) - * __pyx_result = FrozenList.__new__(__pyx_type) - * if __pyx_state is not None: # 
<<<<<<<<<<<<<< - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList__set_state", 0); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyList_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->_items); - __Pyx_DECREF(__pyx_v___pyx_result->_items); - __pyx_v___pyx_result->_items = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = 
__Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v___pyx_result->frozen = __pyx_t_2; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = ((__pyx_t_3 > 2) != 0); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_5 = (__pyx_t_4 != 0); - __pyx_t_2 = __pyx_t_5; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< - */ - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_8 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_8)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_8); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - } - } - __pyx_t_1 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList __pyx_vtable_7aiohttp_11_frozenlist_FrozenList; - -static PyObject *__pyx_tp_new_7aiohttp_11_frozenlist_FrozenList(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o); - p->__pyx_vtab = __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList; - p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList(PyObject *o) { - struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->_items); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_7aiohttp_11_frozenlist_FrozenList(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; - if (p->_items) { - e = (*v)(p->_items, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7aiohttp_11_frozenlist_FrozenList(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; - tmp = ((PyObject*)p->_items); - p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} -static PyObject *__pyx_sq_item_7aiohttp_11_frozenlist_FrozenList(PyObject *o, Py_ssize_t i) { - PyObject *r; - PyObject *x = 
PyInt_FromSsize_t(i); if(!x) return 0; - r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); - Py_DECREF(x); - return r; -} - -static int __pyx_mp_ass_subscript_7aiohttp_11_frozenlist_FrozenList(PyObject *o, PyObject *i, PyObject *v) { - if (v) { - return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(o, i, v); - } - else { - return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(o, i); - } -} - -static PyObject *__pyx_getprop_7aiohttp_11_frozenlist_10FrozenList_frozen(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(o); -} - -static PyMethodDef __pyx_methods_7aiohttp_11_frozenlist_FrozenList[] = { - {"freeze", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze, METH_NOARGS, 0}, - {"__reversed__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__, METH_NOARGS, 0}, - {"insert", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert, METH_VARARGS|METH_KEYWORDS, 0}, - {"index", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index, METH_O, 0}, - {"remove", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove, METH_O, 0}, - {"clear", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear, METH_NOARGS, 0}, - {"extend", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend, METH_O, 0}, - {"reverse", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse, METH_NOARGS, 0}, - {"pop", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop, METH_VARARGS|METH_KEYWORDS, 0}, - {"append", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append, METH_O, 0}, - {"count", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count, METH_O, 0}, - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static struct PyGetSetDef __pyx_getsets_7aiohttp_11_frozenlist_FrozenList[] = { - {(char *)"frozen", __pyx_getprop_7aiohttp_11_frozenlist_10FrozenList_frozen, 0, (char *)0, 0}, - {0, 0, 0, 0, 0} -}; - -static PyNumberMethods __pyx_tp_as_number_FrozenList = { - 0, /*nb_add*/ - 0, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_nonzero*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - 0, /*nb_and*/ - 0, /*nb_xor*/ - 0, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_coerce*/ - #endif - 0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_hex*/ - #endif - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__, /*nb_inplace_add*/ - 0, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, 
/*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - 0, /*nb_inplace_and*/ - 0, /*nb_inplace_xor*/ - 0, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - 0, /*nb_index*/ - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_matrix_multiply*/ - #endif - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_inplace_matrix_multiply*/ - #endif -}; - -static PySequenceMethods __pyx_tp_as_sequence_FrozenList = { - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - __pyx_sq_item_7aiohttp_11_frozenlist_FrozenList, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_FrozenList = { - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__, /*mp_length*/ - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__, /*mp_subscript*/ - __pyx_mp_ass_subscript_7aiohttp_11_frozenlist_FrozenList, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_7aiohttp_11_frozenlist_FrozenList = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._frozenlist.FrozenList", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__, /*tp_repr*/ - &__pyx_tp_as_number_FrozenList, /*tp_as_number*/ - &__pyx_tp_as_sequence_FrozenList, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_FrozenList, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_11_frozenlist_FrozenList, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_11_frozenlist_FrozenList, /*tp_clear*/ - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_11_frozenlist_FrozenList, /*tp_methods*/ - 0, /*tp_members*/ - __pyx_getsets_7aiohttp_11_frozenlist_FrozenList, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_11_frozenlist_FrozenList, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* 
__pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__frozenlist(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__frozenlist}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "_frozenlist", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_Cannot_modify_frozen_list, __pyx_k_Cannot_modify_frozen_list, sizeof(__pyx_k_Cannot_modify_frozen_list), 0, 1, 0, 0}, - {&__pyx_n_s_FrozenList, __pyx_k_FrozenList, sizeof(__pyx_k_FrozenList), 0, 0, 1, 1}, - {&__pyx_kp_u_FrozenList_frozen_r, __pyx_k_FrozenList_frozen_r, sizeof(__pyx_k_FrozenList_frozen_r), 0, 1, 0, 0}, - {&__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_k_Incompatible_checksums_s_vs_0x94, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x94), 0, 0, 1, 0}, - {&__pyx_n_s_MutableSequence, __pyx_k_MutableSequence, sizeof(__pyx_k_MutableSequence), 0, 0, 1, 1}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, - {&__pyx_n_s_aiohttp__frozenlist, __pyx_k_aiohttp__frozenlist, sizeof(__pyx_k_aiohttp__frozenlist), 0, 0, 1, 1}, - {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_collections_abc, __pyx_k_collections_abc, sizeof(__pyx_k_collections_abc), 0, 0, 1, 1}, - {&__pyx_n_s_count, __pyx_k_count, sizeof(__pyx_k_count), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, - {&__pyx_n_s_item, __pyx_k_item, sizeof(__pyx_k_item), 0, 0, 1, 1}, - {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, - {&__pyx_n_s_iter, __pyx_k_iter, sizeof(__pyx_k_iter), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, - {&__pyx_n_s_pos, __pyx_k_pos, sizeof(__pyx_k_pos), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, 
sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_FrozenList, __pyx_k_pyx_unpickle_FrozenList, sizeof(__pyx_k_pyx_unpickle_FrozenList), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_register, __pyx_k_register, sizeof(__pyx_k_register), 0, 0, 1, 1}, - {&__pyx_n_s_remove, __pyx_k_remove, sizeof(__pyx_k_remove), 0, 0, 1, 1}, - {&__pyx_n_s_reversed, __pyx_k_reversed, sizeof(__pyx_k_reversed), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(0, 19, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "aiohttp/_frozenlist.pyx":19 - * cdef object _check_frozen(self): - * if self.frozen: - * raise RuntimeError("Cannot modify frozen list.") # <<<<<<<<<<<<<< - * - * cdef inline object _fast_len(self): - */ - __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_Cannot_modify_frozen_list); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - - /* "(tree fragment)":1 - * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__2 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__2); - __Pyx_GIVEREF(__pyx_tuple__2); - __pyx_codeobj__3 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_FrozenList, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_3 = 
PyInt_FromLong(3); if (unlikely(!__pyx_int_3)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_4 = PyInt_FromLong(4); if (unlikely(!__pyx_int_4)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_5 = PyInt_FromLong(5); if (unlikely(!__pyx_int_5)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_155820355 = PyInt_FromLong(155820355L); if (unlikely(!__pyx_int_155820355)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList = &__pyx_vtable_7aiohttp_11_frozenlist_FrozenList; - __pyx_vtable_7aiohttp_11_frozenlist_FrozenList._check_frozen = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen; - __pyx_vtable_7aiohttp_11_frozenlist_FrozenList._fast_len = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len; - if (PyType_Ready(&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dictoffset && __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (__Pyx_SetVtable(__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dict, __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_FrozenList, (PyObject *)&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) - __pyx_ptype_7aiohttp_11_frozenlist_FrozenList = 
&__pyx_type_7aiohttp_11_frozenlist_FrozenList; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_frozenlist(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_frozenlist(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__frozenlist(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__frozenlist(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? 
-1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { - result = PyDict_SetItemString(moddict, to_name, value); - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__frozenlist(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_frozenlist' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__frozenlist(void)", 0); - if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? */ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_frozenlist", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_b); - __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_cython_runtime); - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - /*--- Initialize various global constants etc. 
[Continuation of the deletion of the Cython-generated C source for aiohttp/_frozenlist.pyx (the aiohttp._frozenlist extension module). In this excerpt the original per-line diff structure was collapsed during extraction and cannot be reconstructed reliably; the removed lines cover the tail of the module init/exec code (registering the module in sys.modules, importing MutableSequence from collections.abc, calling MutableSequence.register(FrozenList), installing the __pyx_unpickle_FrozenList helper, setting __test__, and the error/cleanup exit paths), followed by the standard Cython runtime support helpers: RefNanny, attribute lookup and keyword-argument parsing, argument-count and exception raising, GetItemInt/ObjectGetItem, the fast function-call paths (PyFunctionFastCall, PyCFunctionFastCall, CallOneArg, Call2Args, CallMethod1), list pop_index, exception matching, GetAttr/GetAttr3, dict versioning and module-global lookup, Import/ImportFrom, HasAttr, generic getattr fallbacks, SetVtable, SetupReduce (pickling support), C-line traceback handling, the code-object cache and AddTraceback, int/long conversions (CIntToPy/CIntFromPy), FastTypeChecks, and string-constant initialization plus string conversion helpers. The deletion continues beyond the end of this excerpt.]
(likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = 
((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/third_party/python/aiohttp/aiohttp/_frozenlist.pyx b/third_party/python/aiohttp/aiohttp/_frozenlist.pyx deleted file mode 100644 index b1305772f4b7..000000000000 --- a/third_party/python/aiohttp/aiohttp/_frozenlist.pyx +++ /dev/null @@ -1,108 +0,0 @@ -from collections.abc import MutableSequence - - -cdef class FrozenList: - - cdef readonly bint frozen - cdef list _items - - def __init__(self, items=None): - self.frozen = False - if items is not None: - items = list(items) - else: - items = [] - self._items = items - - cdef object _check_frozen(self): - if self.frozen: - raise RuntimeError("Cannot modify frozen list.") - - cdef inline object _fast_len(self): - return len(self._items) - - def freeze(self): - self.frozen = True - - def __getitem__(self, index): - return self._items[index] - - def __setitem__(self, index, value): - self._check_frozen() - self._items[index] = value - - def __delitem__(self, index): - self._check_frozen() - del self._items[index] - - def __len__(self): - return self._fast_len() - - def __iter__(self): - return self._items.__iter__() - - def __reversed__(self): - return self._items.__reversed__() - - def __richcmp__(self, other, op): - if op == 0: # < - return list(self) < other - if op == 1: # <= - return list(self) <= other - if op == 2: # == - return list(self) == other - if op == 3: # != - return list(self) != other - if op == 4: # > - return list(self) > other - if op == 5: # => - return list(self) >= other - - def insert(self, pos, item): - self._check_frozen() - self._items.insert(pos, item) - - def __contains__(self, item): - return item in self._items - - def __iadd__(self, items): - self._check_frozen() - self._items += 
list(items) - return self - - def index(self, item): - return self._items.index(item) - - def remove(self, item): - self._check_frozen() - self._items.remove(item) - - def clear(self): - self._check_frozen() - self._items.clear() - - def extend(self, items): - self._check_frozen() - self._items += list(items) - - def reverse(self): - self._check_frozen() - self._items.reverse() - - def pop(self, index=-1): - self._check_frozen() - return self._items.pop(index) - - def append(self, item): - self._check_frozen() - return self._items.append(item) - - def count(self, item): - return self._items.count(item) - - def __repr__(self): - return ''.format(self.frozen, - self._items) - - -MutableSequence.register(FrozenList) diff --git a/third_party/python/aiohttp/aiohttp/_headers.pxi b/third_party/python/aiohttp/aiohttp/_headers.pxi deleted file mode 100644 index 3744721d4786..000000000000 --- a/third_party/python/aiohttp/aiohttp/_headers.pxi +++ /dev/null @@ -1,83 +0,0 @@ -# The file is autogenerated from aiohttp/hdrs.py -# Run ./tools/gen.py to update it after the origin changing. - -from . import hdrs -cdef tuple headers = ( - hdrs.ACCEPT, - hdrs.ACCEPT_CHARSET, - hdrs.ACCEPT_ENCODING, - hdrs.ACCEPT_LANGUAGE, - hdrs.ACCEPT_RANGES, - hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, - hdrs.ACCESS_CONTROL_ALLOW_HEADERS, - hdrs.ACCESS_CONTROL_ALLOW_METHODS, - hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, - hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, - hdrs.ACCESS_CONTROL_MAX_AGE, - hdrs.ACCESS_CONTROL_REQUEST_HEADERS, - hdrs.ACCESS_CONTROL_REQUEST_METHOD, - hdrs.AGE, - hdrs.ALLOW, - hdrs.AUTHORIZATION, - hdrs.CACHE_CONTROL, - hdrs.CONNECTION, - hdrs.CONTENT_DISPOSITION, - hdrs.CONTENT_ENCODING, - hdrs.CONTENT_LANGUAGE, - hdrs.CONTENT_LENGTH, - hdrs.CONTENT_LOCATION, - hdrs.CONTENT_MD5, - hdrs.CONTENT_RANGE, - hdrs.CONTENT_TRANSFER_ENCODING, - hdrs.CONTENT_TYPE, - hdrs.COOKIE, - hdrs.DATE, - hdrs.DESTINATION, - hdrs.DIGEST, - hdrs.ETAG, - hdrs.EXPECT, - hdrs.EXPIRES, - hdrs.FORWARDED, - hdrs.FROM, - hdrs.HOST, - hdrs.IF_MATCH, - hdrs.IF_MODIFIED_SINCE, - hdrs.IF_NONE_MATCH, - hdrs.IF_RANGE, - hdrs.IF_UNMODIFIED_SINCE, - hdrs.KEEP_ALIVE, - hdrs.LAST_EVENT_ID, - hdrs.LAST_MODIFIED, - hdrs.LINK, - hdrs.LOCATION, - hdrs.MAX_FORWARDS, - hdrs.ORIGIN, - hdrs.PRAGMA, - hdrs.PROXY_AUTHENTICATE, - hdrs.PROXY_AUTHORIZATION, - hdrs.RANGE, - hdrs.REFERER, - hdrs.RETRY_AFTER, - hdrs.SEC_WEBSOCKET_ACCEPT, - hdrs.SEC_WEBSOCKET_EXTENSIONS, - hdrs.SEC_WEBSOCKET_KEY, - hdrs.SEC_WEBSOCKET_KEY1, - hdrs.SEC_WEBSOCKET_PROTOCOL, - hdrs.SEC_WEBSOCKET_VERSION, - hdrs.SERVER, - hdrs.SET_COOKIE, - hdrs.TE, - hdrs.TRAILER, - hdrs.TRANSFER_ENCODING, - hdrs.URI, - hdrs.UPGRADE, - hdrs.USER_AGENT, - hdrs.VARY, - hdrs.VIA, - hdrs.WWW_AUTHENTICATE, - hdrs.WANT_DIGEST, - hdrs.WARNING, - hdrs.X_FORWARDED_FOR, - hdrs.X_FORWARDED_HOST, - hdrs.X_FORWARDED_PROTO, -) diff --git a/third_party/python/aiohttp/aiohttp/_helpers.c b/third_party/python/aiohttp/aiohttp/_helpers.c deleted file mode 100644 index 764f998447b4..000000000000 --- a/third_party/python/aiohttp/aiohttp/_helpers.c +++ /dev/null @@ -1,5433 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. 
-#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - 
#define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define 
__Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | 
METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__aiohttp___helpers -#define __PYX_HAVE_API__aiohttp___helpers -/* Early includes */ -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define 
__Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "aiohttp/_helpers.pyx", - "stringsource", -}; - -/*--- Type declarations ---*/ -struct __pyx_obj_7aiohttp_8_helpers_reify; - -/* "aiohttp/_helpers.pyx":1 - * cdef class reify: # <<<<<<<<<<<<<< - * """Use as a class method decorator. It operates almost exactly like - * the Python `@property` decorator, but it puts the result of the - */ -struct __pyx_obj_7aiohttp_8_helpers_reify { - PyObject_HEAD - PyObject *wrapped; - PyObject *name; -}; - - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while 
(0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* ObjectGetItem.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key); -#else -#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) -#endif - -/* GetTopmostException.proto */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* SaveResetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, 
PyObject **tb); -#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -#else -#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) -#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) -#endif - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* GetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) 
__Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* SetupReduce.proto */ -static int __Pyx_setup_reduce(PyObject* type_obj); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - - -/* Module declarations from 'aiohttp._helpers' */ -static PyTypeObject *__pyx_ptype_7aiohttp_8_helpers_reify = 0; -static PyObject *__pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(struct __pyx_obj_7aiohttp_8_helpers_reify *, PyObject *); /*proto*/ -#define __Pyx_MODULE_NAME "aiohttp._helpers" -extern int __pyx_module_is_main_aiohttp___helpers; -int __pyx_module_is_main_aiohttp___helpers = 0; - -/* Implementation of 'aiohttp._helpers' */ -static PyObject *__pyx_builtin_KeyError; -static PyObject *__pyx_builtin_AttributeError; -static const char __pyx_k_doc[] = "__doc__"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_cache[] = "_cache"; -static const char __pyx_k_reify[] = "reify"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_update[] = "update"; -static const char __pyx_k_wrapped[] = "wrapped"; -static const char __pyx_k_KeyError[] = "KeyError"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; 
-static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_stringsource[] = "stringsource"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_AttributeError[] = "AttributeError"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_aiohttp__helpers[] = "aiohttp._helpers"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_pyx_unpickle_reify[] = "__pyx_unpickle_reify"; -static const char __pyx_k_reified_property_is_read_only[] = "reified property is read-only"; -static const char __pyx_k_Incompatible_checksums_s_vs_0x77[] = "Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))"; -static PyObject *__pyx_n_s_AttributeError; -static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x77; -static PyObject *__pyx_n_s_KeyError; -static PyObject *__pyx_n_s_PickleError; -static PyObject *__pyx_n_s_aiohttp__helpers; -static PyObject *__pyx_n_s_cache; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_dict; -static PyObject *__pyx_n_s_doc; -static PyObject *__pyx_n_s_getstate; -static PyObject *__pyx_n_s_import; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_new; -static PyObject *__pyx_n_s_pickle; -static PyObject *__pyx_n_s_pyx_PickleError; -static PyObject *__pyx_n_s_pyx_checksum; -static PyObject *__pyx_n_s_pyx_result; -static PyObject *__pyx_n_s_pyx_state; -static PyObject *__pyx_n_s_pyx_type; -static PyObject *__pyx_n_s_pyx_unpickle_reify; -static PyObject *__pyx_n_s_reduce; -static PyObject *__pyx_n_s_reduce_cython; -static PyObject *__pyx_n_s_reduce_ex; -static PyObject *__pyx_kp_u_reified_property_is_read_only; -static PyObject *__pyx_n_s_reify; -static PyObject *__pyx_n_s_setstate; -static PyObject *__pyx_n_s_setstate_cython; -static PyObject *__pyx_kp_s_stringsource; -static PyObject *__pyx_n_s_test; -static PyObject *__pyx_n_s_update; -static PyObject *__pyx_n_s_wrapped; -static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v_wrapped); /* proto */ -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v_inst, CYTHON_UNUSED PyObject *__pyx_v_owner); /* proto */ -static int __pyx_pf_7aiohttp_8_helpers_5reify_4__set__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_inst, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_7aiohttp_8_helpers_reify(PyTypeObject *t, PyObject *a, PyObject 
*k); /*proto*/ -static PyObject *__pyx_int_124832655; -static PyObject *__pyx_tuple_; -static PyObject *__pyx_tuple__2; -static PyObject *__pyx_codeobj__3; -/* Late includes */ - -/* "aiohttp/_helpers.pyx":13 - * cdef object name - * - * def __init__(self, wrapped): # <<<<<<<<<<<<<< - * self.wrapped = wrapped - * self.name = wrapped.__name__ - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_wrapped = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_wrapped,0}; - PyObject* values[1] = {0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_wrapped)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 13, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - } - __pyx_v_wrapped = values[0]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 13, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._helpers.reify.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify___init__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self), __pyx_v_wrapped); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v_wrapped) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "aiohttp/_helpers.pyx":14 - * - * def __init__(self, wrapped): - * self.wrapped = wrapped # <<<<<<<<<<<<<< - * self.name = wrapped.__name__ - * - */ - __Pyx_INCREF(__pyx_v_wrapped); - __Pyx_GIVEREF(__pyx_v_wrapped); - __Pyx_GOTREF(__pyx_v_self->wrapped); - __Pyx_DECREF(__pyx_v_self->wrapped); - __pyx_v_self->wrapped = __pyx_v_wrapped; - - /* "aiohttp/_helpers.pyx":15 - * def __init__(self, wrapped): - * self.wrapped = wrapped - * self.name = wrapped.__name__ # <<<<<<<<<<<<<< - * - * @property - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_wrapped, __pyx_n_s_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->name); - __Pyx_DECREF(__pyx_v_self->name); - __pyx_v_self->name = __pyx_t_1; - 
__pyx_t_1 = 0; - - /* "aiohttp/_helpers.pyx":13 - * cdef object name - * - * def __init__(self, wrapped): # <<<<<<<<<<<<<< - * self.wrapped = wrapped - * self.name = wrapped.__name__ - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._helpers.reify.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_helpers.pyx":18 - * - * @property - * def __doc__(self): # <<<<<<<<<<<<<< - * return self.wrapped.__doc__ - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_7__doc___1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_7__doc___1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__get__", 0); - - /* "aiohttp/_helpers.pyx":19 - * @property - * def __doc__(self): - * return self.wrapped.__doc__ # <<<<<<<<<<<<<< - * - * def __get__(self, inst, owner): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->wrapped, __pyx_n_s_doc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_helpers.pyx":18 - * - * @property - * def __doc__(self): # <<<<<<<<<<<<<< - * return self.wrapped.__doc__ - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._helpers.reify.__doc__.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_helpers.pyx":21 - * return self.wrapped.__doc__ - * - * def __get__(self, inst, owner): # <<<<<<<<<<<<<< - * try: - * try: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_3__get__(PyObject *__pyx_v_self, PyObject *__pyx_v_inst, PyObject *__pyx_v_owner); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_3__get__(PyObject *__pyx_v_self, PyObject *__pyx_v_inst, PyObject *__pyx_v_owner) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify_2__get__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self), ((PyObject *)__pyx_v_inst), ((PyObject *)__pyx_v_owner)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v_inst, CYTHON_UNUSED PyObject *__pyx_v_owner) { - PyObject *__pyx_v_val = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = 
NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - PyObject *__pyx_t_13 = NULL; - int __pyx_t_14; - int __pyx_t_15; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__get__", 0); - - /* "aiohttp/_helpers.pyx":22 - * - * def __get__(self, inst, owner): - * try: # <<<<<<<<<<<<<< - * try: - * return inst._cache[self.name] - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - /*try:*/ { - - /* "aiohttp/_helpers.pyx":23 - * def __get__(self, inst, owner): - * try: - * try: # <<<<<<<<<<<<<< - * return inst._cache[self.name] - * except KeyError: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_4, &__pyx_t_5, &__pyx_t_6); - __Pyx_XGOTREF(__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_5); - __Pyx_XGOTREF(__pyx_t_6); - /*try:*/ { - - /* "aiohttp/_helpers.pyx":24 - * try: - * try: - * return inst._cache[self.name] # <<<<<<<<<<<<<< - * except KeyError: - * val = self.wrapped(inst) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_inst, __pyx_n_s_cache); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 24, __pyx_L9_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_PyObject_GetItem(__pyx_t_7, __pyx_v_self->name); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 24, __pyx_L9_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __pyx_r = __pyx_t_8; - __pyx_t_8 = 0; - goto __pyx_L13_try_return; - - /* "aiohttp/_helpers.pyx":23 - * def __get__(self, inst, owner): - * try: - * try: # <<<<<<<<<<<<<< - * return inst._cache[self.name] - * except KeyError: - */ - } - __pyx_L9_error:; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - - /* "aiohttp/_helpers.pyx":25 - * try: - * return inst._cache[self.name] - * except KeyError: # <<<<<<<<<<<<<< - * val = self.wrapped(inst) - * inst._cache[self.name] = val - */ - __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyError); - if (__pyx_t_9) { - __Pyx_AddTraceback("aiohttp._helpers.reify.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_7, &__pyx_t_10) < 0) __PYX_ERR(0, 25, __pyx_L11_except_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_GOTREF(__pyx_t_10); - - /* "aiohttp/_helpers.pyx":26 - * return inst._cache[self.name] - * except KeyError: - * val = self.wrapped(inst) # <<<<<<<<<<<<<< - * inst._cache[self.name] = val - * return val - */ - __Pyx_INCREF(__pyx_v_self->wrapped); - __pyx_t_12 = __pyx_v_self->wrapped; __pyx_t_13 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_12))) { - __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_12); - if (likely(__pyx_t_13)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); - __Pyx_INCREF(__pyx_t_13); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_12, function); - } - } - __pyx_t_11 = (__pyx_t_13) ? 
__Pyx_PyObject_Call2Args(__pyx_t_12, __pyx_t_13, __pyx_v_inst) : __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_v_inst); - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 26, __pyx_L11_except_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - __pyx_v_val = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_helpers.pyx":27 - * except KeyError: - * val = self.wrapped(inst) - * inst._cache[self.name] = val # <<<<<<<<<<<<<< - * return val - * except AttributeError: - */ - __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_inst, __pyx_n_s_cache); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 27, __pyx_L11_except_error) - __Pyx_GOTREF(__pyx_t_11); - if (unlikely(PyObject_SetItem(__pyx_t_11, __pyx_v_self->name, __pyx_v_val) < 0)) __PYX_ERR(0, 27, __pyx_L11_except_error) - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - - /* "aiohttp/_helpers.pyx":28 - * val = self.wrapped(inst) - * inst._cache[self.name] = val - * return val # <<<<<<<<<<<<<< - * except AttributeError: - * if inst is None: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_val); - __pyx_r = __pyx_v_val; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - goto __pyx_L12_except_return; - } - goto __pyx_L11_except_error; - __pyx_L11_except_error:; - - /* "aiohttp/_helpers.pyx":23 - * def __get__(self, inst, owner): - * try: - * try: # <<<<<<<<<<<<<< - * return inst._cache[self.name] - * except KeyError: - */ - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L3_error; - __pyx_L13_try_return:; - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L7_try_return; - __pyx_L12_except_return:; - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); - goto __pyx_L7_try_return; - } - - /* "aiohttp/_helpers.pyx":22 - * - * def __get__(self, inst, owner): - * try: # <<<<<<<<<<<<<< - * try: - * return inst._cache[self.name] - */ - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - - /* "aiohttp/_helpers.pyx":29 - * inst._cache[self.name] = val - * return val - * except AttributeError: # <<<<<<<<<<<<<< - * if inst is None: - * return self - */ - __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_AttributeError); - if (__pyx_t_9) { - __Pyx_AddTraceback("aiohttp._helpers.reify.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 29, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_GOTREF(__pyx_t_8); - - /* "aiohttp/_helpers.pyx":30 - * return val - * except AttributeError: - * if inst is None: # <<<<<<<<<<<<<< - * return self - * raise - */ - __pyx_t_14 = (__pyx_v_inst == Py_None); - __pyx_t_15 = (__pyx_t_14 != 0); - if (__pyx_t_15) { - - /* "aiohttp/_helpers.pyx":31 - * except AttributeError: - * if inst is None: - * return self # <<<<<<<<<<<<<< - * raise - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject *)__pyx_v_self)); - __pyx_r = ((PyObject *)__pyx_v_self); - 
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - goto __pyx_L6_except_return; - - /* "aiohttp/_helpers.pyx":30 - * return val - * except AttributeError: - * if inst is None: # <<<<<<<<<<<<<< - * return self - * raise - */ - } - - /* "aiohttp/_helpers.pyx":32 - * if inst is None: - * return self - * raise # <<<<<<<<<<<<<< - * - * def __set__(self, inst, value): - */ - __Pyx_GIVEREF(__pyx_t_10); - __Pyx_GIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_ErrRestoreWithState(__pyx_t_10, __pyx_t_7, __pyx_t_8); - __pyx_t_10 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; - __PYX_ERR(0, 32, __pyx_L5_except_error) - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_helpers.pyx":22 - * - * def __get__(self, inst, owner): - * try: # <<<<<<<<<<<<<< - * try: - * return inst._cache[self.name] - */ - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L1_error; - __pyx_L7_try_return:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L0; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L0; - } - - /* "aiohttp/_helpers.pyx":21 - * return self.wrapped.__doc__ - * - * def __get__(self, inst, owner): # <<<<<<<<<<<<<< - * try: - * try: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_XDECREF(__pyx_t_12); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_AddTraceback("aiohttp._helpers.reify.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_val); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_helpers.pyx":34 - * raise - * - * def __set__(self, inst, value): # <<<<<<<<<<<<<< - * raise AttributeError("reified property is read-only") - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_8_helpers_5reify_5__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_inst, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_7aiohttp_8_helpers_5reify_5__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_inst, PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify_4__set__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self), ((PyObject *)__pyx_v_inst), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_8_helpers_5reify_4__set__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_inst, CYTHON_UNUSED PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__set__", 0); - - /* "aiohttp/_helpers.pyx":35 - * - * def __set__(self, inst, value): - * raise AttributeError("reified property is read-only") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_AttributeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 35, __pyx_L1_error) - - /* "aiohttp/_helpers.pyx":34 - * raise - * - * def __set__(self, inst, value): # <<<<<<<<<<<<<< - * raise AttributeError("reified property is read-only") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._helpers.reify.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self.name, self.wrapped) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_self->name); - __Pyx_GIVEREF(__pyx_v_self->name); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->name); - __Pyx_INCREF(__pyx_v_self->wrapped); - __Pyx_GIVEREF(__pyx_v_self->wrapped); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->wrapped); - __pyx_v_state = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self.name, self.wrapped) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = (self.name, self.wrapped) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - __pyx_t_3 = (__pyx_t_2 != 0); - if (__pyx_t_3) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - 
__Pyx_GIVEREF(__pyx_v__dict); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); - __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); - __pyx_t_4 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self.name is not None or self.wrapped is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self.name, self.wrapped) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self.name is not None or self.wrapped is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state - */ - /*else*/ { - __pyx_t_2 = (__pyx_v_self->name != Py_None); - __pyx_t_5 = (__pyx_t_2 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->wrapped != Py_None); - __pyx_t_2 = (__pyx_t_5 != 0); - __pyx_t_3 = __pyx_t_2; - __pyx_L4_bool_binop_done:; - __pyx_v_use_setstate = __pyx_t_3; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.name is not None or self.wrapped is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state - * else: - */ - __pyx_t_3 = (__pyx_v_use_setstate != 0); - if (__pyx_t_3) { - - /* "(tree fragment)":13 - * use_setstate = self.name is not None or self.wrapped is not None - * if use_setstate: - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_reify); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_124832655); - __Pyx_GIVEREF(__pyx_int_124832655); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_124832655); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); - __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.name is not None or self.wrapped is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, None), state - * else: - * return 
__pyx_unpickle_reify, (type(self), 0x770cb8f, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_reify__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_reify); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_124832655); - __Pyx_GIVEREF(__pyx_int_124832655); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_124832655); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __pyx_t_6 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("aiohttp._helpers.reify.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_reify__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_8_helpers_5reify_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":17 - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_reify__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", 
Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_reify, (type(self), 0x770cb8f, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_reify__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._helpers.reify.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_7aiohttp_8_helpers_1__pyx_unpickle_reify = {"__pyx_unpickle_reify", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_reify (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - PyObject* values[3] = {0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_reify", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_reify", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_reify") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 
0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_reify", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_reify", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x770cb8f: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) - */ - __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x770cb8f) != 0); - if (__pyx_t_1) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum != 0x770cb8f: - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) - * __pyx_result = reify.__new__(__pyx_type) - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_t_2); - __pyx_v___pyx_PickleError = __pyx_t_2; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum != 0x770cb8f: - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) # <<<<<<<<<<<<<< - * __pyx_result = reify.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = 
__Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_INCREF(__pyx_v___pyx_PickleError); - __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x770cb8f: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) - * __pyx_result = reify.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_8_helpers_reify), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v___pyx_result = __pyx_t_3; - __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) - * __pyx_result = reify.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_1 = (__pyx_v___pyx_state != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "(tree fragment)":9 - * __pyx_result = reify.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_3 = __pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) - * __pyx_result = reify.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v___pyx_result, PyObject 
*__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_reify__set_state", 0); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->name); - __Pyx_DECREF(__pyx_v___pyx_result->name); - __pyx_v___pyx_result->name = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->wrapped); - __Pyx_DECREF(__pyx_v___pyx_result->wrapped); - __pyx_v___pyx_result->wrapped = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = ((__pyx_t_3 > 2) != 0); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_5 = (__pyx_t_4 != 0); - __pyx_t_2 = __pyx_t_5; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< - */ - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - 
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_8 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_8)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_8); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - } - } - __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_reify__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.name = __pyx_state[0]; __pyx_result.wrapped = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_tp_new_7aiohttp_8_helpers_reify(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_7aiohttp_8_helpers_reify *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_7aiohttp_8_helpers_reify *)o); - p->wrapped = Py_None; Py_INCREF(Py_None); - p->name = Py_None; Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_8_helpers_reify(PyObject *o) { - struct __pyx_obj_7aiohttp_8_helpers_reify *p = (struct __pyx_obj_7aiohttp_8_helpers_reify *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->wrapped); - Py_CLEAR(p->name); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_7aiohttp_8_helpers_reify(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_8_helpers_reify *p = (struct __pyx_obj_7aiohttp_8_helpers_reify *)o; - if (p->wrapped) { - e = (*v)(p->wrapped, a); if (e) return e; - } - if (p->name) { - e = (*v)(p->name, a); if (e) return e; - } - return 0; -} - 
-static int __pyx_tp_clear_7aiohttp_8_helpers_reify(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_8_helpers_reify *p = (struct __pyx_obj_7aiohttp_8_helpers_reify *)o; - tmp = ((PyObject*)p->wrapped); - p->wrapped = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->name); - p->name = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyObject *__pyx_tp_descr_get_7aiohttp_8_helpers_reify(PyObject *o, PyObject *i, PyObject *c) { - PyObject *r = 0; - if (!i) i = Py_None; - if (!c) c = Py_None; - r = __pyx_pw_7aiohttp_8_helpers_5reify_3__get__(o, i, c); - return r; -} - -static int __pyx_tp_descr_set_7aiohttp_8_helpers_reify(PyObject *o, PyObject *i, PyObject *v) { - if (v) { - return __pyx_pw_7aiohttp_8_helpers_5reify_5__set__(o, i, v); - } - else { - PyErr_SetString(PyExc_NotImplementedError, "__delete__"); - return -1; - } -} - -static PyObject *__pyx_getprop_7aiohttp_8_helpers_5reify___doc__(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_8_helpers_5reify_7__doc___1__get__(o); -} - -static PyMethodDef __pyx_methods_7aiohttp_8_helpers_reify[] = { - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_8_helpers_5reify_7__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_8_helpers_5reify_9__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static struct PyGetSetDef __pyx_getsets_7aiohttp_8_helpers_reify[] = { - {(char *)"__doc__", __pyx_getprop_7aiohttp_8_helpers_5reify___doc__, 0, (char *)0, 0}, - {0, 0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._helpers.reify", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_8_helpers_reify), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_8_helpers_reify, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - "Use as a class method decorator. It operates almost exactly like\n the Python `@property` decorator, but it puts the result of the\n method it decorates into the instance dict after the first call,\n effectively replacing the function it decorates with an instance\n variable. 
It is, in Python parlance, a data descriptor.\n\n ", /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_8_helpers_reify, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_8_helpers_reify, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_8_helpers_reify, /*tp_methods*/ - 0, /*tp_members*/ - __pyx_getsets_7aiohttp_8_helpers_reify, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - __pyx_tp_descr_get_7aiohttp_8_helpers_reify, /*tp_descr_get*/ - __pyx_tp_descr_set_7aiohttp_8_helpers_reify, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_7aiohttp_8_helpers_5reify_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_8_helpers_reify, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__helpers(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__helpers}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "_helpers", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, - {&__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_k_Incompatible_checksums_s_vs_0x77, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x77), 0, 0, 1, 0}, - {&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_aiohttp__helpers, __pyx_k_aiohttp__helpers, sizeof(__pyx_k_aiohttp__helpers), 0, 0, 1, 1}, - {&__pyx_n_s_cache, __pyx_k_cache, sizeof(__pyx_k_cache), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_doc, __pyx_k_doc, sizeof(__pyx_k_doc), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, 
__pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_reify, __pyx_k_pyx_unpickle_reify, sizeof(__pyx_k_pyx_unpickle_reify), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_kp_u_reified_property_is_read_only, __pyx_k_reified_property_is_read_only, sizeof(__pyx_k_reified_property_is_read_only), 0, 1, 0, 0}, - {&__pyx_n_s_reify, __pyx_k_reify, sizeof(__pyx_k_reify), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_wrapped, __pyx_k_wrapped, sizeof(__pyx_k_wrapped), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_KeyError = __Pyx_GetBuiltinName(__pyx_n_s_KeyError); if (!__pyx_builtin_KeyError) __PYX_ERR(0, 25, __pyx_L1_error) - __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 29, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "aiohttp/_helpers.pyx":35 - * - * def __set__(self, inst, value): - * raise AttributeError("reified property is read-only") # <<<<<<<<<<<<<< - */ - __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_reified_property_is_read_only); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 35, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - - /* "(tree fragment)":1 - * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__2 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__2); - __Pyx_GIVEREF(__pyx_tuple__2); - __pyx_codeobj__3 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_reify, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, 
__pyx_L1_error); - __pyx_int_124832655 = PyInt_FromLong(124832655L); if (unlikely(!__pyx_int_124832655)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - if (PyType_Ready(&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_7aiohttp_8_helpers_reify.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_8_helpers_reify.tp_dictoffset && __pyx_type_7aiohttp_8_helpers_reify.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_7aiohttp_8_helpers_reify.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_reify, (PyObject *)&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_ptype_7aiohttp_8_helpers_reify = &__pyx_type_7aiohttp_8_helpers_reify; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC 
void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_helpers(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_helpers(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__helpers(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__helpers(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? -1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { - result = PyDict_SetItemString(moddict, to_name, value); - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__helpers(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_helpers' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__helpers(void)", 0); - if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? */ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_helpers", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_b); - __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_cython_runtime); - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_aiohttp___helpers) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "aiohttp._helpers")) { - if (unlikely(PyDict_SetItemString(modules, "aiohttp._helpers", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_type_import_code(); - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "(tree fragment)":1 - * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_8_helpers_1__pyx_unpickle_reify, NULL, __pyx_n_s_aiohttp__helpers); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_reify, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_helpers.pyx":1 - * cdef class reify: # <<<<<<<<<<<<<< - * """Use as a class method decorator. It operates almost exactly like - * the Python `@property` decorator, but it puts the result of the - */ - __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - if (__pyx_m) { - if (__pyx_d) { - __Pyx_AddTraceback("init aiohttp._helpers", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - Py_CLEAR(__pyx_m); - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init aiohttp._helpers"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 
0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? "" : "s", num_found); -} - -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (!j) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } else { - PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; - if (likely(m && m->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { - Py_ssize_t l = m->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return m->sq_item(o, i); - } - } -#else - if (is_list || PySequence_Check(o)) { - return PySequence_GetItem(o, i); - } -#endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -/* ObjectGetItem */ -#if CYTHON_USE_TYPE_SLOTS -static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) { - PyObject *runerr; - Py_ssize_t key_value; - PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence; - if (unlikely(!(m && m->sq_item))) { - PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name); - return NULL; - } - key_value = __Pyx_PyIndex_AsSsize_t(index); - if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { - return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); - } - if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { - PyErr_Clear(); - PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name); - } - return NULL; -} -static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) { - PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping; - if (likely(m && m->mp_subscript)) { - return m->mp_subscript(obj, key); - } - return __Pyx_PyObject_GetIndex(obj, key); -} -#endif - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - #endif - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); -} -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -#endif - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = 
PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i<n; i++) { - if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; - } -#endif - for (i=0; i<n; i++) { - if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; - } - return 0; -} -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { - PyObject *exc_type = tstate->curexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; - if (unlikely(PyTuple_Check(err))) - return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); -} -#endif - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type, *local_value, *local_tb; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - *type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = PyCFunction_GET_SELF(func); - int flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = 
__Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - 
} - return result; -} -#endif - -/* PyObjectCall2Args */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args, *result = NULL; - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyFunction_FastCall(function, args, 2); - } - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyCFunction_FastCall(function, args, 2); - } - #endif - args = PyTuple_New(2); - if (unlikely(!args)) goto done; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, arg2); - Py_INCREF(function); - result = __Pyx_PyObject_Call(function, args, NULL); - Py_DECREF(args); - Py_DECREF(function); -done: - return result; -} - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, &arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 
-static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, - CYTHON_UNUSED PyObject *cause) { - __Pyx_PyThreadState_declare - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { -#if CYTHON_COMPILING_IN_PYPY - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - 
PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#else - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* GetAttr */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { -#if CYTHON_USE_TYPE_SLOTS -#if PY_MAJOR_VERSION >= 3 - if (likely(PyUnicode_Check(n))) -#else - if (likely(PyString_Check(n))) -#endif - return __Pyx_PyObject_GetAttrStr(o, n); -#endif - return PyObject_GetAttr(o, n); -} - -/* GetAttr3 */ -static PyObject *__Pyx_GetAttr3Default(PyObject *d) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - return NULL; - __Pyx_PyErr_Clear(); - Py_INCREF(d); - return d; -} -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { - PyObject *r = __Pyx_GetAttr(o, n); - return (likely(r)) ? r : __Pyx_GetAttr3Default(d); -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - 
py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (!py_import) - goto bad; - #endif - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, 1); - if (!module) { - if (!PyErr_ExceptionMatches(PyExc_ImportError)) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (!py_level) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, level); - #endif - } - } -bad: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - Py_XDECREF(empty_list); - Py_XDECREF(empty_dict); - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* HasAttr */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { - PyObject *r; - if (unlikely(!__Pyx_PyBaseString_Check(n))) { - PyErr_SetString(PyExc_TypeError, - "hasattr(): attribute name must be string"); - return -1; - } - r = __Pyx_GetAttr(o, n); - if (unlikely(!r)) { - PyErr_Clear(); - return 0; - } else { - Py_DECREF(r); - return 1; - } -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, attr_name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(attr_name)); -#endif - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return 
__Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* PyObjectGetAttrStrNoError */ -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -} - -/* SetupReduce */ -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#else - if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#endif -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, 
setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) - PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; 
- __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << 
PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, 
-(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * 
sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * 
sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; ip) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = 
PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? 
digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/third_party/python/aiohttp/aiohttp/_helpers.pyi b/third_party/python/aiohttp/aiohttp/_helpers.pyi deleted file mode 100644 index 1e358937024a..000000000000 --- a/third_party/python/aiohttp/aiohttp/_helpers.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Any - -class reify: - def __init__(self, wrapped: Any) -> None: ... - def __get__(self, inst: Any, owner: Any) -> Any: ... - def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/third_party/python/aiohttp/aiohttp/_helpers.pyx b/third_party/python/aiohttp/aiohttp/_helpers.pyx deleted file mode 100644 index 665f367c5dec..000000000000 --- a/third_party/python/aiohttp/aiohttp/_helpers.pyx +++ /dev/null @@ -1,35 +0,0 @@ -cdef class reify: - """Use as a class method decorator. It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. 
- - """ - - cdef object wrapped - cdef object name - - def __init__(self, wrapped): - self.wrapped = wrapped - self.name = wrapped.__name__ - - @property - def __doc__(self): - return self.wrapped.__doc__ - - def __get__(self, inst, owner): - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise - - def __set__(self, inst, value): - raise AttributeError("reified property is read-only") diff --git a/third_party/python/aiohttp/aiohttp/_http_parser.c b/third_party/python/aiohttp/aiohttp/_http_parser.c deleted file mode 100644 index 096446e8d82a..000000000000 --- a/third_party/python/aiohttp/aiohttp/_http_parser.c +++ /dev/null @@ -1,24607 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. -#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef 
CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef 
__has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, 
lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__aiohttp___http_parser -#define __PYX_HAVE_API__aiohttp___http_parser -/* Early includes */ -#include -#include -#include "pythread.h" -#include -#include -#include "../vendor/http-parser/http_parser.h" -#include "_find_header.h" -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define 
__Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "aiohttp/_http_parser.pyx", - "stringsource", - "type.pxd", - "bool.pxd", - "complex.pxd", - "aiohttp/_headers.pxi", -}; - -/*--- Type declarations ---*/ -struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage; -struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage; -struct __pyx_obj_7aiohttp_12_http_parser_HttpParser; -struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser; -struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser; -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__; -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr; -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__; -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr; -struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init; - -/* "aiohttp/_http_parser.pyx":327 - * PyMem_Free(self._csettings) - * - * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< - * object protocol, object loop, int limit, - * object timer=None, - */ -struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init { - int __pyx_n; - PyObject *timer; - size_t max_line_size; - size_t max_headers; - size_t max_field_size; - PyObject *payload_exception; - int response_with_body; - int read_until_eof; - int auto_decompress; -}; - -/* "aiohttp/_http_parser.pyx":110 - * - * @cython.freelist(DEFAULT_FREELIST_SIZE) - * cdef class RawRequestMessage: # <<<<<<<<<<<<<< - * cdef readonly str method - * cdef readonly str path - */ -struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage { - PyObject_HEAD - PyObject *method; - PyObject *path; - PyObject *version; - PyObject *headers; - PyObject *raw_headers; - PyObject *should_close; - PyObject *compression; - PyObject *upgrade; - PyObject *chunked; - PyObject *url; -}; - - -/* "aiohttp/_http_parser.pyx":210 - * - * @cython.freelist(DEFAULT_FREELIST_SIZE) - * cdef class RawResponseMessage: # <<<<<<<<<<<<<< - * cdef readonly object version # HttpVersion - * cdef readonly int code - */ -struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage { - PyObject_HEAD - PyObject *version; - int code; - PyObject *reason; - PyObject *headers; - PyObject *raw_headers; - PyObject *should_close; - PyObject *compression; - PyObject *upgrade; - PyObject *chunked; -}; - - -/* "aiohttp/_http_parser.pyx":272 - * - * @cython.internal - * cdef class HttpParser: # <<<<<<<<<<<<<< - * - * cdef: - */ -struct __pyx_obj_7aiohttp_12_http_parser_HttpParser { - PyObject_HEAD - struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtab; - struct http_parser *_cparser; - struct http_parser_settings *_csettings; - PyObject *_raw_name; - PyObject *_raw_value; - int _has_value; - PyObject *_protocol; - PyObject *_loop; - PyObject *_timer; - size_t _max_line_size; - size_t _max_field_size; - size_t _max_headers; - int _response_with_body; - int _read_until_eof; - int _started; - PyObject *_url; - PyObject *_buf; - PyObject *_path; - PyObject *_reason; - PyObject *_headers; - PyObject *_raw_headers; - int 
_upgraded; - PyObject *_messages; - PyObject *_payload; - int _payload_error; - PyObject *_payload_exception; - PyObject *_last_error; - int _auto_decompress; - int _limit; - PyObject *_content_encoding; - Py_buffer py_buf; -}; - - -/* "aiohttp/_http_parser.pyx":563 - * - * - * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< - * - * def __init__(self, protocol, loop, int limit, timer=None, - */ -struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser __pyx_base; -}; - - -/* "aiohttp/_http_parser.pyx":591 - * - * - * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< - * - * def __init__(self, protocol, loop, int limit, timer=None, - */ -struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser __pyx_base; -}; - - -/* "aiohttp/_http_parser.pyx":135 - * self.url = url - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("method", self.method)) - */ -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ { - PyObject_HEAD - PyObject *__pyx_v_info; -}; - - -/* "aiohttp/_http_parser.pyx":147 - * info.append(("chunked", self.chunked)) - * info.append(("url", self.url)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< - * return '' - * - */ -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr { - PyObject_HEAD - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *__pyx_outer_scope; - PyObject *__pyx_v_name; - PyObject *__pyx_v_val; -}; - - -/* "aiohttp/_http_parser.pyx":233 - * self.chunked = chunked - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("version", self.version)) - */ -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ { - PyObject_HEAD - PyObject *__pyx_v_info; -}; - - -/* "aiohttp/_http_parser.pyx":244 - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< - * return '' - * - */ -struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr { - PyObject_HEAD - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *__pyx_outer_scope; - PyObject *__pyx_v_name; - PyObject *__pyx_v_val; -}; - - - -/* "aiohttp/_http_parser.pyx":272 - * - * @cython.internal - * cdef class HttpParser: # <<<<<<<<<<<<<< - * - * cdef: - */ - -struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser { - PyObject *(*_init)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, enum http_parser_type, PyObject *, PyObject *, int, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args); - PyObject *(*_process_header)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - PyObject *(*_on_header_field)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, char *, size_t); - PyObject *(*_on_header_value)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, char *, size_t); - PyObject *(*_on_headers_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - PyObject *(*_on_message_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - PyObject *(*_on_chunk_header)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - PyObject *(*_on_chunk_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - PyObject *(*_on_status_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - PyObject *(*http_version)(struct 
__pyx_obj_7aiohttp_12_http_parser_HttpParser *); -}; -static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); - - -/* "aiohttp/_http_parser.pyx":563 - * - * - * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< - * - * def __init__(self, protocol, loop, int limit, timer=None, - */ - -struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser { - struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_base; -}; -static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser; - - -/* "aiohttp/_http_parser.pyx":591 - * - * - * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< - * - * def __init__(self, protocol, loop, int limit, timer=None, - */ - -struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser { - struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_base; -}; -static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser; - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - 
#define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* decode_c_string_utf16.proto */ -static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) { - int byteorder = 0; - return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); -} -static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) { - int byteorder = -1; - return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); -} -static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) { - int byteorder = 1; - return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); -} - -/* decode_c_bytes.proto */ -static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( - const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, - const char* encoding, const char* errors, - PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); - -/* decode_bytes.proto */ -static CYTHON_INLINE PyObject* __Pyx_decode_bytes( - PyObject* string, Py_ssize_t start, Py_ssize_t stop, - const char* encoding, const char* errors, - PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { - return __Pyx_decode_c_bytes( - PyBytes_AS_STRING(string), 
PyBytes_GET_SIZE(string), - start, stop, encoding, errors, decode_func); -} - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* None.proto */ -static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname); - -/* RaiseTooManyValuesToUnpack.proto */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); - -/* RaiseNeedMoreValuesToUnpack.proto */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); - -/* IterFinish.proto */ -static CYTHON_INLINE int __Pyx_IterFinish(void); - -/* UnpackItemEndCheck.proto */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); - -/* ListCompAppend.proto */ -#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS -static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { - PyListObject* L = (PyListObject*) list; - Py_ssize_t len = Py_SIZE(list); - if (likely(L->allocated > len)) { - Py_INCREF(x); - PyList_SET_ITEM(list, len, x); - __Pyx_SET_SIZE(list, len + 1); - return 0; - } - return PyList_Append(list, x); -} -#else -#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x) -#endif - -/* ListAppend.proto */ -#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS -static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { - PyListObject* L = (PyListObject*) list; - Py_ssize_t len = Py_SIZE(list); - if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { - Py_INCREF(x); - PyList_SET_ITEM(list, len, x); - __Pyx_SET_SIZE(list, len + 1); - return 0; - } - return PyList_Append(list, x); -} -#else -#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) -#endif - -/* KeywordStringCheck.proto */ -static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); - -/* ExtTypeTest.proto */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); - -/* PyDictContains.proto */ -static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { - int result = PyDict_Contains(dict, item); - return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); -} - -/* DictGetItem.proto */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); -#define __Pyx_PyObject_Dict_GetItem(obj, name)\ - (likely(PyDict_CheckExact(obj)) ?\ - __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) -#else -#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) -#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) -#endif - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if 
(likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallNoArg.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* 
__Pyx_PyObject_CallNoArg(PyObject *func); -#else -#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) -#endif - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PySequenceContains.proto */ -static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { - int result = PySequence_Contains(seq, item); - return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); -} - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* IncludeStringH.proto */ -#include - -/* BytesEquals.proto */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); - -/* UnicodeEquals.proto */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); - -/* SliceObject.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( - PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, - PyObject** py_start, PyObject** py_stop, PyObject** py_slice, - int has_cstart, int has_cstop, int wraparound); - -/* decode_bytearray.proto */ -static CYTHON_INLINE PyObject* __Pyx_decode_bytearray( - PyObject* string, Py_ssize_t start, Py_ssize_t stop, - const char* encoding, const char* errors, - PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { - return __Pyx_decode_c_bytes( - PyByteArray_AS_STRING(string), PyByteArray_GET_SIZE(string), - start, stop, encoding, errors, decode_func); -} - -/* GetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* SwapException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* GetTopmostException.proto */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); -#endif - -/* SaveResetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -#else -#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) -#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, 
tb) -#endif - -/* decode_c_string.proto */ -static CYTHON_INLINE PyObject* __Pyx_decode_c_string( - const char* cstring, Py_ssize_t start, Py_ssize_t stop, - const char* encoding, const char* errors, - PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); - -/* UnpackUnboundCMethod.proto */ -typedef struct { - PyObject *type; - PyObject **method_name; - PyCFunction func; - PyObject *method; - int flag; -} __Pyx_CachedCFunction; - -/* CallUnboundCMethod1.proto */ -static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); -#else -#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) -#endif - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* SetupReduce.proto */ -static int __Pyx_setup_reduce(PyObject* type_obj); - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable); - -/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto -#define __PYX_HAVE_RT_ImportType_proto -enum __Pyx_ImportType_CheckSize { - __Pyx_ImportType_CheckSize_Error = 0, - __Pyx_ImportType_CheckSize_Warn = 1, - __Pyx_ImportType_CheckSize_Ignore = 2 -}; -static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); -#endif - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_short(unsigned short value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE enum http_method __Pyx_PyInt_As_enum__http_method(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* FetchCommonType.proto */ -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod1.proto */ -static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); - -/* CoroutineBase.proto */ -typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyThreadState *, PyObject *); -#if CYTHON_USE_EXC_INFO_STACK -#define __Pyx_ExcInfoStruct _PyErr_StackItem -#else -typedef struct { - PyObject *exc_type; - PyObject *exc_value; - PyObject *exc_traceback; -} __Pyx_ExcInfoStruct; -#endif -typedef struct { - PyObject_HEAD - __pyx_coroutine_body_t body; - PyObject *closure; - __Pyx_ExcInfoStruct gi_exc_state; - PyObject *gi_weakreflist; - PyObject *classobj; - PyObject *yieldfrom; - PyObject *gi_name; - PyObject *gi_qualname; - PyObject *gi_modulename; - PyObject *gi_code; - int resume_label; - char is_running; -} __pyx_CoroutineObject; -static __pyx_CoroutineObject 
*__Pyx__Coroutine_New( - PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, - PyObject *name, PyObject *qualname, PyObject *module_name); -static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( - __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, - PyObject *name, PyObject *qualname, PyObject *module_name); -static CYTHON_INLINE void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *self); -static int __Pyx_Coroutine_clear(PyObject *self); -static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); -static PyObject *__Pyx_Coroutine_Close(PyObject *self); -static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); -#if CYTHON_USE_EXC_INFO_STACK -#define __Pyx_Coroutine_SwapException(self) -#define __Pyx_Coroutine_ResetAndClearException(self) __Pyx_Coroutine_ExceptionClear(&(self)->gi_exc_state) -#else -#define __Pyx_Coroutine_SwapException(self) {\ - __Pyx_ExceptionSwap(&(self)->gi_exc_state.exc_type, &(self)->gi_exc_state.exc_value, &(self)->gi_exc_state.exc_traceback);\ - __Pyx_Coroutine_ResetFrameBackpointer(&(self)->gi_exc_state);\ - } -#define __Pyx_Coroutine_ResetAndClearException(self) {\ - __Pyx_ExceptionReset((self)->gi_exc_state.exc_type, (self)->gi_exc_state.exc_value, (self)->gi_exc_state.exc_traceback);\ - (self)->gi_exc_state.exc_type = (self)->gi_exc_state.exc_value = (self)->gi_exc_state.exc_traceback = NULL;\ - } -#endif -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyGen_FetchStopIterationValue(pvalue)\ - __Pyx_PyGen__FetchStopIterationValue(__pyx_tstate, pvalue) -#else -#define __Pyx_PyGen_FetchStopIterationValue(pvalue)\ - __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, pvalue) -#endif -static int __Pyx_PyGen__FetchStopIterationValue(PyThreadState *tstate, PyObject **pvalue); -static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state); - -/* PatchModuleWithCoroutine.proto */ -static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); - -/* PatchGeneratorABC.proto */ -static int __Pyx_patch_abc(void); - -/* Generator.proto */ -#define __Pyx_Generator_USED -static PyTypeObject *__pyx_GeneratorType = 0; -#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType) -#define __Pyx_Generator_New(body, code, closure, name, qualname, module_name)\ - __Pyx__Coroutine_New(__pyx_GeneratorType, body, code, closure, name, qualname, module_name) -static PyObject *__Pyx_Generator_Next(PyObject *self); -static int __pyx_Generator_init(void); - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum http_parser_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, int __pyx_v_limit, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(struct 
__pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self); /* proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_18HttpResponseParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self); /* proto*/ - -/* Module declarations from 'cpython.version' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.type' */ -static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; - -/* Module declarations from 'libc.string' */ - -/* Module declarations from 'libc.stdio' */ - -/* Module declarations from 'cpython.object' */ - -/* Module declarations from 'cpython.ref' */ - -/* Module declarations from 'cpython.exc' */ - -/* Module declarations from 'cpython.module' */ - -/* Module declarations from 'cpython.mem' */ - -/* Module declarations from 'cpython.tuple' */ - -/* Module declarations from 'cpython.list' */ - -/* Module declarations from 'cpython.sequence' */ - -/* Module declarations from 'cpython.mapping' */ - -/* Module declarations from 'cpython.iterator' */ - -/* Module declarations from 'cpython.number' */ - -/* Module declarations from 'cpython.int' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.bool' */ -static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; - -/* Module declarations from 'cpython.long' */ - -/* Module declarations from 'cpython.float' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.complex' */ -static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; - -/* Module declarations from 'cpython.string' */ - -/* Module declarations from 'cpython.unicode' */ - -/* Module declarations from 'cpython.dict' */ - -/* Module declarations from 'cpython.instance' */ - -/* Module declarations from 'cpython.function' */ - -/* Module declarations from 'cpython.method' */ - -/* Module declarations from 'cpython.weakref' */ - -/* Module declarations from 'cpython.getargs' */ - -/* Module declarations from 'cpython.pythread' */ - -/* Module declarations from 'cpython.pystate' */ - -/* Module declarations from 'cpython.cobject' */ - -/* Module declarations from 'cpython.oldbuffer' */ - -/* Module declarations from 'cpython.set' */ - -/* Module declarations from 'cpython.buffer' */ - -/* Module declarations from 'cpython.bytes' 
*/ - -/* Module declarations from 'cpython.pycapsule' */ - -/* Module declarations from 'cpython' */ - -/* Module declarations from 'libc.limits' */ - -/* Module declarations from 'cython' */ - -/* Module declarations from 'aiohttp' */ - -/* Module declarations from 'libc.stdint' */ - -/* Module declarations from 'aiohttp._cparser' */ - -/* Module declarations from 'aiohttp._find_header' */ - -/* Module declarations from 'aiohttp._http_parser' */ -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpParser = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParser = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParser = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct____repr__ = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ = 0; -static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_headers = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_URL = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_URL_build = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_CIMultiDict = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_HttpVersion = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_HttpVersion10 = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_HttpVersion11 = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1 = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_StreamReader = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser_DeflateBuffer = 0; -static PyObject *__pyx_v_7aiohttp_12_http_parser__http_method = 0; -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_extend(PyObject *, char const *, size_t); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(int); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObject *); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, int, PyObject *, int, int, PyObject *); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject *, int, PyObject *, PyObject *, PyObject *, int, PyObject *, int, int); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin(struct http_parser *); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_url(struct http_parser *, char const *, size_t); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_status(struct http_parser *, char const *, size_t); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_field(struct http_parser *, char const *, size_t); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_value(struct http_parser *, char const *, size_t); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete(struct http_parser *); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_body(struct http_parser 
*, char const *, size_t); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete(struct http_parser *); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header(struct http_parser *); /*proto*/ -static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(struct http_parser *); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(enum http_errno); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser__parse_url(char *, size_t); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *, PyObject *); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *, PyObject *); /*proto*/ -#define __Pyx_MODULE_NAME "aiohttp._http_parser" -extern int __pyx_module_is_main_aiohttp___http_parser; -int __pyx_module_is_main_aiohttp___http_parser = 0; - -/* Implementation of 'aiohttp._http_parser' */ -static PyObject *__pyx_builtin_range; -static PyObject *__pyx_builtin_MemoryError; -static PyObject *__pyx_builtin_TypeError; -static PyObject *__pyx_builtin_BaseException; -static const char __pyx_k_[] = "="; -static const char __pyx_k_i[] = "i"; -static const char __pyx_k_TE[] = "TE"; -static const char __pyx_k__2[] = ", "; -static const char __pyx_k__3[] = ")>"; -static const char __pyx_k__4[] = ""; -static const char __pyx_k_br[] = "br"; -static const char __pyx_k_AGE[] = "AGE"; -static const char __pyx_k_URI[] = "URI"; -static const char __pyx_k_URL[] = "URL"; -static const char __pyx_k_VIA[] = "VIA"; -static const char __pyx_k__11[] = ":"; -static const char __pyx_k_add[] = "add"; -static const char __pyx_k_all[] = "__all__"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_url[] = "url"; -static const char __pyx_k_DATE[] = "DATE"; -static const char __pyx_k_ETAG[] = "ETAG"; -static const char __pyx_k_FROM[] = "FROM"; -static const char __pyx_k_HOST[] = "HOST"; -static const char __pyx_k_LINK[] = "LINK"; -static const char __pyx_k_VARY[] = "VARY"; -static const char __pyx_k_args[] = "args"; -static const char __pyx_k_code[] = "code"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_gzip[] = "gzip"; -static const char __pyx_k_hdrs[] = "hdrs"; -static const char __pyx_k_host[] = "host"; -static const char __pyx_k_loop[] = "loop"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_path[] = "path"; -static const char __pyx_k_port[] = "port"; -static const char __pyx_k_send[] = "send"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_user[] = "user"; -static const char __pyx_k_yarl[] = "yarl"; -static const char __pyx_k_ALLOW[] = "ALLOW"; -static const char __pyx_k_RANGE[] = "RANGE"; -static const char __pyx_k_URL_2[] = "_URL"; -static const char __pyx_k_build[] = "build"; -static const char __pyx_k_close[] = "close"; -static const char __pyx_k_limit[] = "limit"; -static const char __pyx_k_lower[] = "lower"; -static const char __pyx_k_range[] = "range"; -static const char __pyx_k_throw[] = "throw"; -static const char __pyx_k_timer[] = "timer"; -static const char __pyx_k_ACCEPT[] = "ACCEPT"; -static const char __pyx_k_COOKIE[] = "COOKIE"; -static const char __pyx_k_DIGEST[] = "DIGEST"; -static const char __pyx_k_EXPECT[] = "EXPECT"; -static const char __pyx_k_ORIGIN[] = 
"ORIGIN"; -static const char __pyx_k_PRAGMA[] = "PRAGMA"; -static const char __pyx_k_SERVER[] = "SERVER"; -static const char __pyx_k_format[] = "format"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_method[] = "method"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_py_buf[] = "py_buf"; -static const char __pyx_k_reason[] = "reason"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_scheme[] = "scheme"; -static const char __pyx_k_update[] = "update"; -static const char __pyx_k_EXPIRES[] = "EXPIRES"; -static const char __pyx_k_REFERER[] = "REFERER"; -static const char __pyx_k_TRAILER[] = "TRAILER"; -static const char __pyx_k_UPGRADE[] = "UPGRADE"; -static const char __pyx_k_WARNING[] = "WARNING"; -static const char __pyx_k_aiohttp[] = "aiohttp"; -static const char __pyx_k_chunked[] = "chunked"; -static const char __pyx_k_deflate[] = "deflate"; -static const char __pyx_k_encoded[] = "encoded"; -static const char __pyx_k_genexpr[] = "genexpr"; -static const char __pyx_k_headers[] = "headers"; -static const char __pyx_k_streams[] = "streams"; -static const char __pyx_k_unknown[] = ""; -static const char __pyx_k_upgrade[] = "upgrade"; -static const char __pyx_k_version[] = "version"; -static const char __pyx_k_IF_MATCH[] = "IF_MATCH"; -static const char __pyx_k_IF_RANGE[] = "IF_RANGE"; -static const char __pyx_k_LOCATION[] = "LOCATION"; -static const char __pyx_k_buf_data[] = "buf_data"; -static const char __pyx_k_feed_eof[] = "feed_eof"; -static const char __pyx_k_fragment[] = "fragment"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_password[] = "password"; -static const char __pyx_k_protocol[] = "protocol"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_FORWARDED[] = "FORWARDED"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_feed_data[] = "feed_data"; -static const char __pyx_k_multidict[] = "multidict"; -static const char __pyx_k_parse_url[] = "parse_url"; -static const char __pyx_k_partition[] = "partition"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_CONNECTION[] = "CONNECTION"; -static const char __pyx_k_KEEP_ALIVE[] = "KEEP_ALIVE"; -static const char __pyx_k_SET_COOKIE[] = "SET_COOKIE"; -static const char __pyx_k_USER_AGENT[] = "USER_AGENT"; -static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_CIMultiDict[] = "CIMultiDict"; -static const char __pyx_k_CONTENT_MD5[] = "CONTENT_MD5"; -static const char __pyx_k_DESTINATION[] = "DESTINATION"; -static const char __pyx_k_HttpVersion[] = "HttpVersion"; -static const char __pyx_k_LineTooLong[] = "LineTooLong"; -static const char __pyx_k_MemoryError[] = "MemoryError"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_RETRY_AFTER[] = "RETRY_AFTER"; -static const char __pyx_k_WANT_DIGEST[] = "WANT_DIGEST"; -static const char __pyx_k_compression[] = "compression"; -static const char __pyx_k_http_parser[] = "http_parser"; -static const char __pyx_k_http_writer[] = "http_writer"; -static const char __pyx_k_max_headers[] = "max_headers"; -static const char __pyx_k_raw_headers[] = "raw_headers"; -static const char __pyx_k_CONTENT_TYPE[] = "CONTENT_TYPE"; -static const char 
__pyx_k_MAX_FORWARDS[] = "MAX_FORWARDS"; -static const char __pyx_k_StreamReader[] = "StreamReader"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_query_string[] = "query_string"; -static const char __pyx_k_should_close[] = "should_close"; -static const char __pyx_k_stringsource[] = "stringsource"; -static const char __pyx_k_ACCEPT_RANGES[] = "ACCEPT_RANGES"; -static const char __pyx_k_AUTHORIZATION[] = "AUTHORIZATION"; -static const char __pyx_k_BadStatusLine[] = "BadStatusLine"; -static const char __pyx_k_BaseException[] = "BaseException"; -static const char __pyx_k_CACHE_CONTROL[] = "CACHE_CONTROL"; -static const char __pyx_k_CIMultiDict_2[] = "_CIMultiDict"; -static const char __pyx_k_CONTENT_RANGE[] = "CONTENT_RANGE"; -static const char __pyx_k_DeflateBuffer[] = "DeflateBuffer"; -static const char __pyx_k_EMPTY_PAYLOAD[] = "EMPTY_PAYLOAD"; -static const char __pyx_k_HttpVersion10[] = "HttpVersion10"; -static const char __pyx_k_HttpVersion11[] = "HttpVersion11"; -static const char __pyx_k_HttpVersion_2[] = "_HttpVersion"; -static const char __pyx_k_IF_NONE_MATCH[] = "IF_NONE_MATCH"; -static const char __pyx_k_InvalidHeader[] = "InvalidHeader"; -static const char __pyx_k_LAST_EVENT_ID[] = "LAST_EVENT_ID"; -static const char __pyx_k_LAST_MODIFIED[] = "LAST_MODIFIED"; -static const char __pyx_k_invalid_url_r[] = "invalid url {!r}"; -static const char __pyx_k_max_line_size[] = "max_line_size"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_set_exception[] = "set_exception"; -static const char __pyx_k_ACCEPT_CHARSET[] = "ACCEPT_CHARSET"; -static const char __pyx_k_BadHttpMessage[] = "BadHttpMessage"; -static const char __pyx_k_CONTENT_LENGTH[] = "CONTENT_LENGTH"; -static const char __pyx_k_StreamReader_2[] = "_StreamReader"; -static const char __pyx_k_max_field_size[] = "max_field_size"; -static const char __pyx_k_read_until_eof[] = "read_until_eof"; -static const char __pyx_k_ACCEPT_ENCODING[] = "ACCEPT_ENCODING"; -static const char __pyx_k_ACCEPT_LANGUAGE[] = "ACCEPT_LANGUAGE"; -static const char __pyx_k_DeflateBuffer_2[] = "_DeflateBuffer"; -static const char __pyx_k_EMPTY_PAYLOAD_2[] = "_EMPTY_PAYLOAD"; -static const char __pyx_k_HttpVersion10_2[] = "_HttpVersion10"; -static const char __pyx_k_HttpVersion11_2[] = "_HttpVersion11"; -static const char __pyx_k_InvalidURLError[] = "InvalidURLError"; -static const char __pyx_k_X_FORWARDED_FOR[] = "X_FORWARDED_FOR"; -static const char __pyx_k_auto_decompress[] = "auto_decompress"; -static const char __pyx_k_http_exceptions[] = "http_exceptions"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_CIMultiDictProxy[] = "CIMultiDictProxy"; -static const char __pyx_k_CONTENT_ENCODING[] = "CONTENT_ENCODING"; -static const char __pyx_k_CONTENT_LANGUAGE[] = "CONTENT_LANGUAGE"; -static const char __pyx_k_CONTENT_LOCATION[] = "CONTENT_LOCATION"; -static const char __pyx_k_WWW_AUTHENTICATE[] = "WWW_AUTHENTICATE"; -static const char __pyx_k_X_FORWARDED_HOST[] = "X_FORWARDED_HOST"; -static const char __pyx_k_HttpRequestParser[] = "HttpRequestParser"; -static const char __pyx_k_IF_MODIFIED_SINCE[] = "IF_MODIFIED_SINCE"; -static const char __pyx_k_RawRequestMessage[] = "_http_method[i] - */ - -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_http_method_str(int __pyx_v_i) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int 
__pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("http_method_str", 0); - - /* "aiohttp/_http_parser.pyx":93 - * - * cdef inline str http_method_str(int i): - * if i < METHODS_COUNT: # <<<<<<<<<<<<<< - * return _http_method[i] - * else: - */ - __pyx_t_1 = ((__pyx_v_i < 34) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":94 - * cdef inline str http_method_str(int i): - * if i < METHODS_COUNT: - * return _http_method[i] # <<<<<<<<<<<<<< - * else: - * return "" - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_7aiohttp_12_http_parser__http_method == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 94, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 94, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject*)__pyx_t_2)); - __pyx_r = ((PyObject*)__pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":93 - * - * cdef inline str http_method_str(int i): - * if i < METHODS_COUNT: # <<<<<<<<<<<<<< - * return _http_method[i] - * else: - */ - } - - /* "aiohttp/_http_parser.pyx":96 - * return _http_method[i] - * else: - * return "" # <<<<<<<<<<<<<< - * - * cdef inline object find_header(bytes raw_header): - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_kp_u_unknown); - __pyx_r = __pyx_kp_u_unknown; - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":92 - * - * - * cdef inline str http_method_str(int i): # <<<<<<<<<<<<<< - * if i < METHODS_COUNT: - * return _http_method[i] - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._http_parser.http_method_str", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":98 - * return "" - * - * cdef inline object find_header(bytes raw_header): # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * cdef char *buf - */ - -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_find_header(PyObject *__pyx_v_raw_header) { - Py_ssize_t __pyx_v_size; - char *__pyx_v_buf; - int __pyx_v_idx; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("find_header", 0); - - /* "aiohttp/_http_parser.pyx":102 - * cdef char *buf - * cdef int idx - * PyBytes_AsStringAndSize(raw_header, &buf, &size) # <<<<<<<<<<<<<< - * idx = _find_header.find_header(buf, size) - * if idx == -1: - */ - __pyx_t_1 = PyBytes_AsStringAndSize(__pyx_v_raw_header, (&__pyx_v_buf), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 102, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":103 - * cdef int idx - * PyBytes_AsStringAndSize(raw_header, &buf, &size) - * idx = _find_header.find_header(buf, size) # <<<<<<<<<<<<<< - * if idx == -1: - * return raw_header.decode('utf-8', 'surrogateescape') - */ - __pyx_v_idx = find_header(__pyx_v_buf, __pyx_v_size); - - /* "aiohttp/_http_parser.pyx":104 - * PyBytes_AsStringAndSize(raw_header, &buf, &size) - * idx = _find_header.find_header(buf, size) - * if idx == -1: # <<<<<<<<<<<<<< - * 
return raw_header.decode('utf-8', 'surrogateescape') - * return headers[idx] - */ - __pyx_t_2 = ((__pyx_v_idx == -1L) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":105 - * idx = _find_header.find_header(buf, size) - * if idx == -1: - * return raw_header.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * return headers[idx] - * - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_raw_header == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); - __PYX_ERR(0, 105, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_decode_bytes(__pyx_v_raw_header, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 105, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":104 - * PyBytes_AsStringAndSize(raw_header, &buf, &size) - * idx = _find_header.find_header(buf, size) - * if idx == -1: # <<<<<<<<<<<<<< - * return raw_header.decode('utf-8', 'surrogateescape') - * return headers[idx] - */ - } - - /* "aiohttp/_http_parser.pyx":106 - * if idx == -1: - * return raw_header.decode('utf-8', 'surrogateescape') - * return headers[idx] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_7aiohttp_12_http_parser_headers == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 106, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_7aiohttp_12_http_parser_headers, __pyx_v_idx, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 106, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":98 - * return "" - * - * cdef inline object find_header(bytes raw_header): # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * cdef char *buf - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.find_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":122 - * cdef readonly object url # yarl.URL - * - * def __init__(self, method, path, version, headers, raw_headers, # <<<<<<<<<<<<<< - * should_close, compression, upgrade, chunked, url): - * self.method = method - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_method = 0; - PyObject *__pyx_v_path = 0; - PyObject *__pyx_v_version = 0; - PyObject *__pyx_v_headers = 0; - PyObject *__pyx_v_raw_headers = 0; - PyObject *__pyx_v_should_close = 0; - PyObject *__pyx_v_compression = 0; - PyObject *__pyx_v_upgrade = 0; - PyObject *__pyx_v_chunked = 0; - PyObject *__pyx_v_url = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_method,&__pyx_n_s_path,&__pyx_n_s_version,&__pyx_n_s_headers,&__pyx_n_s_raw_headers,&__pyx_n_s_should_close,&__pyx_n_s_compression,&__pyx_n_s_upgrade,&__pyx_n_s_chunked,&__pyx_n_s_url,0}; - PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_method)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_path)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 1); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_version)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 2); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_headers)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 3); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 4: - if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_raw_headers)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 4); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 5: - if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_should_close)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 5); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 6: - if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compression)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 6); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 7: - if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_upgrade)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 7); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 8: - if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_chunked)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 8); __PYX_ERR(0, 122, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 9: - if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_url)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, 9); __PYX_ERR(0, 122, __pyx_L3_error) - } - } - if 
(unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 122, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 10) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - } - __pyx_v_method = values[0]; - __pyx_v_path = values[1]; - __pyx_v_version = values[2]; - __pyx_v_headers = values[3]; - __pyx_v_raw_headers = values[4]; - __pyx_v_should_close = values[5]; - __pyx_v_compression = values[6]; - __pyx_v_upgrade = values[7]; - __pyx_v_chunked = values[8]; - __pyx_v_url = values[9]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 10, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 122, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self), __pyx_v_method, __pyx_v_path, __pyx_v_version, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_compression, __pyx_v_upgrade, __pyx_v_chunked, __pyx_v_url); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage___init__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self, PyObject *__pyx_v_method, PyObject *__pyx_v_path, PyObject *__pyx_v_version, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, PyObject *__pyx_v_should_close, PyObject *__pyx_v_compression, PyObject *__pyx_v_upgrade, PyObject *__pyx_v_chunked, PyObject *__pyx_v_url) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "aiohttp/_http_parser.pyx":124 - * def __init__(self, method, path, version, headers, raw_headers, - * should_close, compression, upgrade, chunked, url): - * self.method = method # <<<<<<<<<<<<<< - * self.path = path - * self.version = version - */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_method))||((__pyx_v_method) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_method)->tp_name), 0))) __PYX_ERR(0, 124, __pyx_L1_error) - __pyx_t_1 = __pyx_v_method; - __Pyx_INCREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->method); - __Pyx_DECREF(__pyx_v_self->method); - __pyx_v_self->method = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":125 - * should_close, compression, upgrade, chunked, url): - * self.method = method - * self.path = path # <<<<<<<<<<<<<< - * self.version = version - * self.headers = headers - */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_path))||((__pyx_v_path) == Py_None)||(PyErr_Format(PyExc_TypeError, 
"Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_path)->tp_name), 0))) __PYX_ERR(0, 125, __pyx_L1_error) - __pyx_t_1 = __pyx_v_path; - __Pyx_INCREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->path); - __Pyx_DECREF(__pyx_v_self->path); - __pyx_v_self->path = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":126 - * self.method = method - * self.path = path - * self.version = version # <<<<<<<<<<<<<< - * self.headers = headers - * self.raw_headers = raw_headers - */ - __Pyx_INCREF(__pyx_v_version); - __Pyx_GIVEREF(__pyx_v_version); - __Pyx_GOTREF(__pyx_v_self->version); - __Pyx_DECREF(__pyx_v_self->version); - __pyx_v_self->version = __pyx_v_version; - - /* "aiohttp/_http_parser.pyx":127 - * self.path = path - * self.version = version - * self.headers = headers # <<<<<<<<<<<<<< - * self.raw_headers = raw_headers - * self.should_close = should_close - */ - __Pyx_INCREF(__pyx_v_headers); - __Pyx_GIVEREF(__pyx_v_headers); - __Pyx_GOTREF(__pyx_v_self->headers); - __Pyx_DECREF(__pyx_v_self->headers); - __pyx_v_self->headers = __pyx_v_headers; - - /* "aiohttp/_http_parser.pyx":128 - * self.version = version - * self.headers = headers - * self.raw_headers = raw_headers # <<<<<<<<<<<<<< - * self.should_close = should_close - * self.compression = compression - */ - __Pyx_INCREF(__pyx_v_raw_headers); - __Pyx_GIVEREF(__pyx_v_raw_headers); - __Pyx_GOTREF(__pyx_v_self->raw_headers); - __Pyx_DECREF(__pyx_v_self->raw_headers); - __pyx_v_self->raw_headers = __pyx_v_raw_headers; - - /* "aiohttp/_http_parser.pyx":129 - * self.headers = headers - * self.raw_headers = raw_headers - * self.should_close = should_close # <<<<<<<<<<<<<< - * self.compression = compression - * self.upgrade = upgrade - */ - __Pyx_INCREF(__pyx_v_should_close); - __Pyx_GIVEREF(__pyx_v_should_close); - __Pyx_GOTREF(__pyx_v_self->should_close); - __Pyx_DECREF(__pyx_v_self->should_close); - __pyx_v_self->should_close = __pyx_v_should_close; - - /* "aiohttp/_http_parser.pyx":130 - * self.raw_headers = raw_headers - * self.should_close = should_close - * self.compression = compression # <<<<<<<<<<<<<< - * self.upgrade = upgrade - * self.chunked = chunked - */ - __Pyx_INCREF(__pyx_v_compression); - __Pyx_GIVEREF(__pyx_v_compression); - __Pyx_GOTREF(__pyx_v_self->compression); - __Pyx_DECREF(__pyx_v_self->compression); - __pyx_v_self->compression = __pyx_v_compression; - - /* "aiohttp/_http_parser.pyx":131 - * self.should_close = should_close - * self.compression = compression - * self.upgrade = upgrade # <<<<<<<<<<<<<< - * self.chunked = chunked - * self.url = url - */ - __Pyx_INCREF(__pyx_v_upgrade); - __Pyx_GIVEREF(__pyx_v_upgrade); - __Pyx_GOTREF(__pyx_v_self->upgrade); - __Pyx_DECREF(__pyx_v_self->upgrade); - __pyx_v_self->upgrade = __pyx_v_upgrade; - - /* "aiohttp/_http_parser.pyx":132 - * self.compression = compression - * self.upgrade = upgrade - * self.chunked = chunked # <<<<<<<<<<<<<< - * self.url = url - * - */ - __Pyx_INCREF(__pyx_v_chunked); - __Pyx_GIVEREF(__pyx_v_chunked); - __Pyx_GOTREF(__pyx_v_self->chunked); - __Pyx_DECREF(__pyx_v_self->chunked); - __pyx_v_self->chunked = __pyx_v_chunked; - - /* "aiohttp/_http_parser.pyx":133 - * self.upgrade = upgrade - * self.chunked = chunked - * self.url = url # <<<<<<<<<<<<<< - * - * def __repr__(self): - */ - __Pyx_INCREF(__pyx_v_url); - __Pyx_GIVEREF(__pyx_v_url); - __Pyx_GOTREF(__pyx_v_self->url); - __Pyx_DECREF(__pyx_v_self->url); - __pyx_v_self->url = __pyx_v_url; - - /* "aiohttp/_http_parser.pyx":122 - * cdef readonly 
object url # yarl.URL - * - * def __init__(self, method, path, version, headers, raw_headers, # <<<<<<<<<<<<<< - * should_close, compression, upgrade, chunked, url): - * self.method = method - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":135 - * self.url = url - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("method", self.method)) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ - -/* "aiohttp/_http_parser.pyx":147 - * info.append(("chunked", self.chunked)) - * info.append(("url", self.url)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< - * return '' - * - */ - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___genexpr(PyObject *__pyx_self) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_cur_scope; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("genexpr", 0); - __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, __pyx_empty_tuple, NULL); - if (unlikely(!__pyx_cur_scope)) { - __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)Py_None); - __Pyx_INCREF(Py_None); - __PYX_ERR(0, 147, __pyx_L1_error) - } else { - __Pyx_GOTREF(__pyx_cur_scope); - } - __pyx_cur_scope->__pyx_outer_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *) __pyx_self; - __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_outer_scope)); - __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope); - { - __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_DECREF(__pyx_cur_scope); - __Pyx_RefNannyFinishContext(); - return (PyObject *) gen; - } - - /* function exit code */ - __pyx_L1_error:; - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__repr__.genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); - __Pyx_XGIVEREF(__pyx_r); - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_gb_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___2generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ -{ - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)__pyx_generator->closure); - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *(*__pyx_t_7)(PyObject *); - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("genexpr", 0); - switch (__pyx_generator->resume_label) { - case 0: goto __pyx_L3_first_run; - default: /* CPython raises the right error here */ - __Pyx_RefNannyFinishContext(); - return NULL; - } - __pyx_L3_first_run:; - if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 147, __pyx_L1_error) - __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_r); - if (unlikely(!__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info)) { __Pyx_RaiseClosureNameError("info"); __PYX_ERR(0, 147, __pyx_L1_error) } - if (unlikely(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 147, __pyx_L1_error) - } - __pyx_t_1 = __pyx_cur_scope->__pyx_outer_scope->__pyx_v_info; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; - for (;;) { - if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 147, __pyx_L1_error) - #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { - PyObject* sequence = __pyx_t_3; - Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); - if (unlikely(size != 2)) { - if (size > 2) __Pyx_RaiseTooManyValuesError(2); - else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); - __PYX_ERR(0, 147, __pyx_L1_error) - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - if (likely(PyTuple_CheckExact(sequence))) { - __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); - __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); - } else { - __pyx_t_4 = PyList_GET_ITEM(sequence, 0); - __pyx_t_5 = PyList_GET_ITEM(sequence, 1); - } - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - #else - __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } else { - Py_ssize_t index = -1; - __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; - index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L6_unpacking_failed; - __Pyx_GOTREF(__pyx_t_4); 
- index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L6_unpacking_failed; - __Pyx_GOTREF(__pyx_t_5); - if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 147, __pyx_L1_error) - __pyx_t_7 = NULL; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - goto __pyx_L7_unpacking_done; - __pyx_L6_unpacking_failed:; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_7 = NULL; - if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); - __PYX_ERR(0, 147, __pyx_L1_error) - __pyx_L7_unpacking_done:; - } - __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_name); - __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_name, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __pyx_t_4 = 0; - __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_val); - __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_val, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __pyx_t_5 = 0; - __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_r); __pyx_r = 0; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - #if !CYTHON_USE_EXC_INFO_STACK - __Pyx_Coroutine_ResetAndClearException(__pyx_generator); - #endif - __pyx_generator->resume_label = -1; - __Pyx_Coroutine_clear((PyObject*)__pyx_generator); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":135 - * self.url = url - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("method", self.method)) - */ - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_2__repr__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *__pyx_cur_scope; - PyObject *__pyx_v_sinfo = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__repr__", 0); - __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct____repr__(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct____repr__, __pyx_empty_tuple, NULL); - if (unlikely(!__pyx_cur_scope)) { - __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)Py_None); - __Pyx_INCREF(Py_None); - __PYX_ERR(0, 135, __pyx_L1_error) - } else { - __Pyx_GOTREF(__pyx_cur_scope); - } - - /* "aiohttp/_http_parser.pyx":136 - * - * def __repr__(self): - * info = [] # <<<<<<<<<<<<<< - * 
info.append(("method", self.method)) - * info.append(("path", self.path)) - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 136, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_cur_scope->__pyx_v_info = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":137 - * def __repr__(self): - * info = [] - * info.append(("method", self.method)) # <<<<<<<<<<<<<< - * info.append(("path", self.path)) - * info.append(("version", self.version)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 137, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_method); - __Pyx_GIVEREF(__pyx_n_u_method); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_method); - __Pyx_INCREF(__pyx_v_self->method); - __Pyx_GIVEREF(__pyx_v_self->method); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->method); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 137, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":138 - * info = [] - * info.append(("method", self.method)) - * info.append(("path", self.path)) # <<<<<<<<<<<<<< - * info.append(("version", self.version)) - * info.append(("headers", self.headers)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 138, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_path); - __Pyx_GIVEREF(__pyx_n_u_path); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_path); - __Pyx_INCREF(__pyx_v_self->path); - __Pyx_GIVEREF(__pyx_v_self->path); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->path); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 138, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":139 - * info.append(("method", self.method)) - * info.append(("path", self.path)) - * info.append(("version", self.version)) # <<<<<<<<<<<<<< - * info.append(("headers", self.headers)) - * info.append(("raw_headers", self.raw_headers)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 139, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_version); - __Pyx_GIVEREF(__pyx_n_u_version); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version); - __Pyx_INCREF(__pyx_v_self->version); - __Pyx_GIVEREF(__pyx_v_self->version); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 139, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":140 - * info.append(("path", self.path)) - * info.append(("version", self.version)) - * info.append(("headers", self.headers)) # <<<<<<<<<<<<<< - * info.append(("raw_headers", self.raw_headers)) - * info.append(("should_close", self.should_close)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 140, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_headers); - __Pyx_GIVEREF(__pyx_n_u_headers); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_headers); - __Pyx_INCREF(__pyx_v_self->headers); - __Pyx_GIVEREF(__pyx_v_self->headers); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->headers); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 140, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); 
__pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":141 - * info.append(("version", self.version)) - * info.append(("headers", self.headers)) - * info.append(("raw_headers", self.raw_headers)) # <<<<<<<<<<<<<< - * info.append(("should_close", self.should_close)) - * info.append(("compression", self.compression)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_raw_headers); - __Pyx_GIVEREF(__pyx_n_u_raw_headers); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_raw_headers); - __Pyx_INCREF(__pyx_v_self->raw_headers); - __Pyx_GIVEREF(__pyx_v_self->raw_headers); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->raw_headers); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":142 - * info.append(("headers", self.headers)) - * info.append(("raw_headers", self.raw_headers)) - * info.append(("should_close", self.should_close)) # <<<<<<<<<<<<<< - * info.append(("compression", self.compression)) - * info.append(("upgrade", self.upgrade)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 142, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_should_close); - __Pyx_GIVEREF(__pyx_n_u_should_close); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_should_close); - __Pyx_INCREF(__pyx_v_self->should_close); - __Pyx_GIVEREF(__pyx_v_self->should_close); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->should_close); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 142, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":143 - * info.append(("raw_headers", self.raw_headers)) - * info.append(("should_close", self.should_close)) - * info.append(("compression", self.compression)) # <<<<<<<<<<<<<< - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 143, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_compression); - __Pyx_GIVEREF(__pyx_n_u_compression); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_compression); - __Pyx_INCREF(__pyx_v_self->compression); - __Pyx_GIVEREF(__pyx_v_self->compression); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->compression); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 143, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":144 - * info.append(("should_close", self.should_close)) - * info.append(("compression", self.compression)) - * info.append(("upgrade", self.upgrade)) # <<<<<<<<<<<<<< - * info.append(("chunked", self.chunked)) - * info.append(("url", self.url)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 144, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_upgrade); - __Pyx_GIVEREF(__pyx_n_u_upgrade); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_upgrade); - __Pyx_INCREF(__pyx_v_self->upgrade); - __Pyx_GIVEREF(__pyx_v_self->upgrade); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->upgrade); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 144, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - 
- /* "aiohttp/_http_parser.pyx":145 - * info.append(("compression", self.compression)) - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) # <<<<<<<<<<<<<< - * info.append(("url", self.url)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 145, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_chunked); - __Pyx_GIVEREF(__pyx_n_u_chunked); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_chunked); - __Pyx_INCREF(__pyx_v_self->chunked); - __Pyx_GIVEREF(__pyx_v_self->chunked); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->chunked); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 145, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":146 - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) - * info.append(("url", self.url)) # <<<<<<<<<<<<<< - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - * return '' - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_url); - __Pyx_GIVEREF(__pyx_n_u_url); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_url); - __Pyx_INCREF(__pyx_v_self->url); - __Pyx_GIVEREF(__pyx_v_self->url); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->url); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":147 - * info.append(("chunked", self.chunked)) - * info.append(("url", self.url)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< - * return '' - * - */ - __pyx_t_1 = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__repr___genexpr(((PyObject*)__pyx_cur_scope)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_Generator_Next(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 147, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_sinfo = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":148 - * info.append(("url", self.url)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - * return '' # <<<<<<<<<<<<<< - * - * def _replace(self, **dct): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawRequestMessage, __pyx_v_sinfo); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyUnicode_Concat(__pyx_t_1, __pyx_kp_u__3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":135 - * self.url = url - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("method", self.method)) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__repr__", __pyx_clineno, __pyx_lineno, 
__pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_sinfo); - __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":150 - * return '' - * - * def _replace(self, **dct): # <<<<<<<<<<<<<< - * cdef RawRequestMessage ret - * ret = _new_request_message(self.method, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_5_replace(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_5_replace(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_dct = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_replace (wrapper)", 0); - if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { - __Pyx_RaiseArgtupleInvalid("_replace", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return NULL;} - if (__pyx_kwds && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "_replace", 1))) return NULL; - __pyx_v_dct = (__pyx_kwds) ? PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_dct)) return NULL; - __Pyx_GOTREF(__pyx_v_dct); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self), __pyx_v_dct); - - /* function exit code */ - __Pyx_XDECREF(__pyx_v_dct); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4_replace(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self, PyObject *__pyx_v_dct) { - struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_ret = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - int __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_replace", 0); - - /* "aiohttp/_http_parser.pyx":152 - * def _replace(self, **dct): - * cdef RawRequestMessage ret - * ret = _new_request_message(self.method, # <<<<<<<<<<<<<< - * self.path, - * self.version, - */ - __pyx_t_1 = __pyx_v_self->method; - __Pyx_INCREF(__pyx_t_1); - - /* "aiohttp/_http_parser.pyx":153 - * cdef RawRequestMessage ret - * ret = _new_request_message(self.method, - * self.path, # <<<<<<<<<<<<<< - * self.version, - * self.headers, - */ - __pyx_t_2 = __pyx_v_self->path; - __Pyx_INCREF(__pyx_t_2); - - /* "aiohttp/_http_parser.pyx":154 - * ret = _new_request_message(self.method, - * self.path, - * self.version, # <<<<<<<<<<<<<< - * self.headers, - * self.raw_headers, - */ - __pyx_t_3 = __pyx_v_self->version; - __Pyx_INCREF(__pyx_t_3); - - /* "aiohttp/_http_parser.pyx":155 - * self.path, - * self.version, - * self.headers, # <<<<<<<<<<<<<< - * self.raw_headers, - * self.should_close, - */ - __pyx_t_4 = __pyx_v_self->headers; - __Pyx_INCREF(__pyx_t_4); - - /* "aiohttp/_http_parser.pyx":156 - * self.version, - * self.headers, - * self.raw_headers, # <<<<<<<<<<<<<< - * self.should_close, - * self.compression, - */ - __pyx_t_5 = __pyx_v_self->raw_headers; - __Pyx_INCREF(__pyx_t_5); - - /* "aiohttp/_http_parser.pyx":157 - * self.headers, - * self.raw_headers, - * 
self.should_close, # <<<<<<<<<<<<<< - * self.compression, - * self.upgrade, - */ - __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_self->should_close); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 157, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":158 - * self.raw_headers, - * self.should_close, - * self.compression, # <<<<<<<<<<<<<< - * self.upgrade, - * self.chunked, - */ - __pyx_t_7 = __pyx_v_self->compression; - __Pyx_INCREF(__pyx_t_7); - - /* "aiohttp/_http_parser.pyx":159 - * self.should_close, - * self.compression, - * self.upgrade, # <<<<<<<<<<<<<< - * self.chunked, - * self.url) - */ - __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_v_self->upgrade); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 159, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":160 - * self.compression, - * self.upgrade, - * self.chunked, # <<<<<<<<<<<<<< - * self.url) - * if "method" in dct: - */ - __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_self->chunked); if (unlikely((__pyx_t_9 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 160, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":161 - * self.upgrade, - * self.chunked, - * self.url) # <<<<<<<<<<<<<< - * if "method" in dct: - * ret.method = dct["method"] - */ - __pyx_t_10 = __pyx_v_self->url; - __Pyx_INCREF(__pyx_t_10); - - /* "aiohttp/_http_parser.pyx":152 - * def _replace(self, **dct): - * cdef RawRequestMessage ret - * ret = _new_request_message(self.method, # <<<<<<<<<<<<<< - * self.path, - * self.version, - */ - __pyx_t_11 = __pyx_f_7aiohttp_12_http_parser__new_request_message(((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_2), __pyx_t_3, __pyx_t_4, __pyx_t_5, __pyx_t_6, __pyx_t_7, __pyx_t_8, __pyx_t_9, __pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 152, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage))))) __PYX_ERR(0, 152, __pyx_L1_error) - __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_t_11); - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":162 - * self.chunked, - * self.url) - * if "method" in dct: # <<<<<<<<<<<<<< - * ret.method = dct["method"] - * if "path" in dct: - */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_method, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 162, __pyx_L1_error) - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "aiohttp/_http_parser.pyx":163 - * self.url) - * if "method" in dct: - * ret.method = dct["method"] # <<<<<<<<<<<<<< - * if "path" in dct: - * ret.path = dct["path"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_method); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 163, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_11)->tp_name), 0))) __PYX_ERR(0, 163, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->method); - __Pyx_DECREF(__pyx_v_ret->method); - __pyx_v_ret->method = ((PyObject*)__pyx_t_11); - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":162 - * self.chunked, - * self.url) - * if "method" 
in dct: # <<<<<<<<<<<<<< - * ret.method = dct["method"] - * if "path" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":164 - * if "method" in dct: - * ret.method = dct["method"] - * if "path" in dct: # <<<<<<<<<<<<<< - * ret.path = dct["path"] - * if "version" in dct: - */ - __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_path, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 164, __pyx_L1_error) - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "aiohttp/_http_parser.pyx":165 - * ret.method = dct["method"] - * if "path" in dct: - * ret.path = dct["path"] # <<<<<<<<<<<<<< - * if "version" in dct: - * ret.version = dct["version"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_path); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 165, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - if (!(likely(PyUnicode_CheckExact(__pyx_t_11))||((__pyx_t_11) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_11)->tp_name), 0))) __PYX_ERR(0, 165, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->path); - __Pyx_DECREF(__pyx_v_ret->path); - __pyx_v_ret->path = ((PyObject*)__pyx_t_11); - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":164 - * if "method" in dct: - * ret.method = dct["method"] - * if "path" in dct: # <<<<<<<<<<<<<< - * ret.path = dct["path"] - * if "version" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":166 - * if "path" in dct: - * ret.path = dct["path"] - * if "version" in dct: # <<<<<<<<<<<<<< - * ret.version = dct["version"] - * if "headers" in dct: - */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_version, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 166, __pyx_L1_error) - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "aiohttp/_http_parser.pyx":167 - * ret.path = dct["path"] - * if "version" in dct: - * ret.version = dct["version"] # <<<<<<<<<<<<<< - * if "headers" in dct: - * ret.headers = dct["headers"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_version); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 167, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->version); - __Pyx_DECREF(__pyx_v_ret->version); - __pyx_v_ret->version = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":166 - * if "path" in dct: - * ret.path = dct["path"] - * if "version" in dct: # <<<<<<<<<<<<<< - * ret.version = dct["version"] - * if "headers" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":168 - * if "version" in dct: - * ret.version = dct["version"] - * if "headers" in dct: # <<<<<<<<<<<<<< - * ret.headers = dct["headers"] - * if "raw_headers" in dct: - */ - __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_headers, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 168, __pyx_L1_error) - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "aiohttp/_http_parser.pyx":169 - * ret.version = dct["version"] - * if "headers" in dct: - * ret.headers = dct["headers"] # <<<<<<<<<<<<<< - * if "raw_headers" in dct: - * ret.raw_headers = dct["raw_headers"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 169, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->headers); - __Pyx_DECREF(__pyx_v_ret->headers); - __pyx_v_ret->headers = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":168 - * if "version" in dct: - * ret.version = dct["version"] - * if "headers" in 
dct: # <<<<<<<<<<<<<< - * ret.headers = dct["headers"] - * if "raw_headers" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":170 - * if "headers" in dct: - * ret.headers = dct["headers"] - * if "raw_headers" in dct: # <<<<<<<<<<<<<< - * ret.raw_headers = dct["raw_headers"] - * if "should_close" in dct: - */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_raw_headers, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 170, __pyx_L1_error) - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "aiohttp/_http_parser.pyx":171 - * ret.headers = dct["headers"] - * if "raw_headers" in dct: - * ret.raw_headers = dct["raw_headers"] # <<<<<<<<<<<<<< - * if "should_close" in dct: - * ret.should_close = dct["should_close"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_raw_headers); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 171, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->raw_headers); - __Pyx_DECREF(__pyx_v_ret->raw_headers); - __pyx_v_ret->raw_headers = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":170 - * if "headers" in dct: - * ret.headers = dct["headers"] - * if "raw_headers" in dct: # <<<<<<<<<<<<<< - * ret.raw_headers = dct["raw_headers"] - * if "should_close" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":172 - * if "raw_headers" in dct: - * ret.raw_headers = dct["raw_headers"] - * if "should_close" in dct: # <<<<<<<<<<<<<< - * ret.should_close = dct["should_close"] - * if "compression" in dct: - */ - __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_should_close, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 172, __pyx_L1_error) - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "aiohttp/_http_parser.pyx":173 - * ret.raw_headers = dct["raw_headers"] - * if "should_close" in dct: - * ret.should_close = dct["should_close"] # <<<<<<<<<<<<<< - * if "compression" in dct: - * ret.compression = dct["compression"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_should_close); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 173, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->should_close); - __Pyx_DECREF(__pyx_v_ret->should_close); - __pyx_v_ret->should_close = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":172 - * if "raw_headers" in dct: - * ret.raw_headers = dct["raw_headers"] - * if "should_close" in dct: # <<<<<<<<<<<<<< - * ret.should_close = dct["should_close"] - * if "compression" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":174 - * if "should_close" in dct: - * ret.should_close = dct["should_close"] - * if "compression" in dct: # <<<<<<<<<<<<<< - * ret.compression = dct["compression"] - * if "upgrade" in dct: - */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_compression, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 174, __pyx_L1_error) - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "aiohttp/_http_parser.pyx":175 - * ret.should_close = dct["should_close"] - * if "compression" in dct: - * ret.compression = dct["compression"] # <<<<<<<<<<<<<< - * if "upgrade" in dct: - * ret.upgrade = dct["upgrade"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_compression); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 175, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->compression); - __Pyx_DECREF(__pyx_v_ret->compression); - __pyx_v_ret->compression = __pyx_t_11; - __pyx_t_11 = 0; - - /* 
"aiohttp/_http_parser.pyx":174 - * if "should_close" in dct: - * ret.should_close = dct["should_close"] - * if "compression" in dct: # <<<<<<<<<<<<<< - * ret.compression = dct["compression"] - * if "upgrade" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":176 - * if "compression" in dct: - * ret.compression = dct["compression"] - * if "upgrade" in dct: # <<<<<<<<<<<<<< - * ret.upgrade = dct["upgrade"] - * if "chunked" in dct: - */ - __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_upgrade, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 176, __pyx_L1_error) - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "aiohttp/_http_parser.pyx":177 - * ret.compression = dct["compression"] - * if "upgrade" in dct: - * ret.upgrade = dct["upgrade"] # <<<<<<<<<<<<<< - * if "chunked" in dct: - * ret.chunked = dct["chunked"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_upgrade); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 177, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->upgrade); - __Pyx_DECREF(__pyx_v_ret->upgrade); - __pyx_v_ret->upgrade = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":176 - * if "compression" in dct: - * ret.compression = dct["compression"] - * if "upgrade" in dct: # <<<<<<<<<<<<<< - * ret.upgrade = dct["upgrade"] - * if "chunked" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":178 - * if "upgrade" in dct: - * ret.upgrade = dct["upgrade"] - * if "chunked" in dct: # <<<<<<<<<<<<<< - * ret.chunked = dct["chunked"] - * if "url" in dct: - */ - __pyx_t_9 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_chunked, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 178, __pyx_L1_error) - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "aiohttp/_http_parser.pyx":179 - * ret.upgrade = dct["upgrade"] - * if "chunked" in dct: - * ret.chunked = dct["chunked"] # <<<<<<<<<<<<<< - * if "url" in dct: - * ret.url = dct["url"] - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_chunked); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 179, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->chunked); - __Pyx_DECREF(__pyx_v_ret->chunked); - __pyx_v_ret->chunked = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":178 - * if "upgrade" in dct: - * ret.upgrade = dct["upgrade"] - * if "chunked" in dct: # <<<<<<<<<<<<<< - * ret.chunked = dct["chunked"] - * if "url" in dct: - */ - } - - /* "aiohttp/_http_parser.pyx":180 - * if "chunked" in dct: - * ret.chunked = dct["chunked"] - * if "url" in dct: # <<<<<<<<<<<<<< - * ret.url = dct["url"] - * return ret - */ - __pyx_t_8 = (__Pyx_PyDict_ContainsTF(__pyx_n_u_url, __pyx_v_dct, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 180, __pyx_L1_error) - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "aiohttp/_http_parser.pyx":181 - * ret.chunked = dct["chunked"] - * if "url" in dct: - * ret.url = dct["url"] # <<<<<<<<<<<<<< - * return ret - * - */ - __pyx_t_11 = __Pyx_PyDict_GetItem(__pyx_v_dct, __pyx_n_u_url); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 181, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GIVEREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_v_ret->url); - __Pyx_DECREF(__pyx_v_ret->url); - __pyx_v_ret->url = __pyx_t_11; - __pyx_t_11 = 0; - - /* "aiohttp/_http_parser.pyx":180 - * if "chunked" in dct: - * ret.chunked = dct["chunked"] - * if "url" in dct: # <<<<<<<<<<<<<< - * ret.url = dct["url"] - * return ret - */ - } - - /* "aiohttp/_http_parser.pyx":182 - * if "url" 
in dct: - * ret.url = dct["url"] - * return ret # <<<<<<<<<<<<<< - * - * cdef _new_request_message(str method, - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject *)__pyx_v_ret)); - __pyx_r = ((PyObject *)__pyx_v_ret); - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":150 - * return '' - * - * def _replace(self, **dct): # <<<<<<<<<<<<<< - * cdef RawRequestMessage ret - * ret = _new_request_message(self.method, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage._replace", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_ret); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":111 - * @cython.freelist(DEFAULT_FREELIST_SIZE) - * cdef class RawRequestMessage: - * cdef readonly str method # <<<<<<<<<<<<<< - * cdef readonly str path - * cdef readonly object version # HttpVersion - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_6method_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_6method_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6method___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6method___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->method); - __pyx_r = __pyx_v_self->method; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":112 - * cdef class RawRequestMessage: - * cdef readonly str method - * cdef readonly str path # <<<<<<<<<<<<<< - * cdef readonly object version # HttpVersion - * cdef readonly object headers # CIMultiDict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_4path_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_4path_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4path___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_4path___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->path); - __pyx_r = __pyx_v_self->path; - goto __pyx_L0; - - 
/* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":113 - * cdef readonly str method - * cdef readonly str path - * cdef readonly object version # HttpVersion # <<<<<<<<<<<<<< - * cdef readonly object headers # CIMultiDict - * cdef readonly object raw_headers # tuple - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7version_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7version_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7version___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7version___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->version); - __pyx_r = __pyx_v_self->version; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":114 - * cdef readonly str path - * cdef readonly object version # HttpVersion - * cdef readonly object headers # CIMultiDict # <<<<<<<<<<<<<< - * cdef readonly object raw_headers # tuple - * cdef readonly object should_close - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7headers_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7headers_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->headers); - __pyx_r = __pyx_v_self->headers; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":115 - * cdef readonly object version # HttpVersion - * cdef readonly object headers # CIMultiDict - * cdef readonly object raw_headers # tuple # <<<<<<<<<<<<<< - * cdef readonly object should_close - * cdef readonly object compression - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = 
__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->raw_headers); - __pyx_r = __pyx_v_self->raw_headers; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":116 - * cdef readonly object headers # CIMultiDict - * cdef readonly object raw_headers # tuple - * cdef readonly object should_close # <<<<<<<<<<<<<< - * cdef readonly object compression - * cdef readonly object upgrade - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_12should_close_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_12should_close_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_12should_close___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_12should_close___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->should_close); - __pyx_r = __pyx_v_self->should_close; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":117 - * cdef readonly object raw_headers # tuple - * cdef readonly object should_close - * cdef readonly object compression # <<<<<<<<<<<<<< - * cdef readonly object upgrade - * cdef readonly object chunked - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11compression_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11compression_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11compression___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_11compression___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->compression); - __pyx_r = __pyx_v_self->compression; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return 
__pyx_r; -} - -/* "aiohttp/_http_parser.pyx":118 - * cdef readonly object should_close - * cdef readonly object compression - * cdef readonly object upgrade # <<<<<<<<<<<<<< - * cdef readonly object chunked - * cdef readonly object url # yarl.URL - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->upgrade); - __pyx_r = __pyx_v_self->upgrade; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":119 - * cdef readonly object compression - * cdef readonly object upgrade - * cdef readonly object chunked # <<<<<<<<<<<<<< - * cdef readonly object url # yarl.URL - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7chunked_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7chunked_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7chunked___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_7chunked___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->chunked); - __pyx_r = __pyx_v_self->chunked; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":120 - * cdef readonly object upgrade - * cdef readonly object chunked - * cdef readonly object url # yarl.URL # <<<<<<<<<<<<<< - * - * def __init__(self, method, path, version, headers, raw_headers, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3url_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3url_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_3url___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static 
PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_3url___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->url); - __pyx_r = __pyx_v_self->url; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_6__reduce_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = PyTuple_New(10); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_self->chunked); - __Pyx_GIVEREF(__pyx_v_self->chunked); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->chunked); - __Pyx_INCREF(__pyx_v_self->compression); - __Pyx_GIVEREF(__pyx_v_self->compression); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->compression); - __Pyx_INCREF(__pyx_v_self->headers); - __Pyx_GIVEREF(__pyx_v_self->headers); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_self->headers); - __Pyx_INCREF(__pyx_v_self->method); - __Pyx_GIVEREF(__pyx_v_self->method); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_v_self->method); - __Pyx_INCREF(__pyx_v_self->path); - __Pyx_GIVEREF(__pyx_v_self->path); - PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_self->path); - __Pyx_INCREF(__pyx_v_self->raw_headers); - __Pyx_GIVEREF(__pyx_v_self->raw_headers); - PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_v_self->raw_headers); - __Pyx_INCREF(__pyx_v_self->should_close); - __Pyx_GIVEREF(__pyx_v_self->should_close); - PyTuple_SET_ITEM(__pyx_t_1, 6, __pyx_v_self->should_close); - __Pyx_INCREF(__pyx_v_self->upgrade); - __Pyx_GIVEREF(__pyx_v_self->upgrade); - PyTuple_SET_ITEM(__pyx_t_1, 7, __pyx_v_self->upgrade); - __Pyx_INCREF(__pyx_v_self->url); - __Pyx_GIVEREF(__pyx_v_self->url); - 
PyTuple_SET_ITEM(__pyx_t_1, 8, __pyx_v_self->url); - __Pyx_INCREF(__pyx_v_self->version); - __Pyx_GIVEREF(__pyx_v_self->version); - PyTuple_SET_ITEM(__pyx_t_1, 9, __pyx_v_self->version); - __pyx_v_state = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - __pyx_t_3 = (__pyx_t_2 != 0); - if (__pyx_t_3) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); - __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); - __pyx_t_4 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self.chunked, self.compression, self.headers, self.method, self.path, self.raw_headers, self.should_close, self.upgrade, self.url, self.version) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state - */ - /*else*/ { - __pyx_t_2 = (__pyx_v_self->chunked != Py_None); - __pyx_t_5 = (__pyx_t_2 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->compression != Py_None); - __pyx_t_2 = (__pyx_t_5 != 0); - if (!__pyx_t_2) { - } else { - __pyx_t_3 = __pyx_t_2; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = 
(__pyx_v_self->headers != Py_None); - __pyx_t_5 = (__pyx_t_2 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->method != ((PyObject*)Py_None)); - __pyx_t_2 = (__pyx_t_5 != 0); - if (!__pyx_t_2) { - } else { - __pyx_t_3 = __pyx_t_2; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = (__pyx_v_self->path != ((PyObject*)Py_None)); - __pyx_t_5 = (__pyx_t_2 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->raw_headers != Py_None); - __pyx_t_2 = (__pyx_t_5 != 0); - if (!__pyx_t_2) { - } else { - __pyx_t_3 = __pyx_t_2; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = (__pyx_v_self->should_close != Py_None); - __pyx_t_5 = (__pyx_t_2 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->upgrade != Py_None); - __pyx_t_2 = (__pyx_t_5 != 0); - if (!__pyx_t_2) { - } else { - __pyx_t_3 = __pyx_t_2; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = (__pyx_v_self->url != Py_None); - __pyx_t_5 = (__pyx_t_2 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->version != Py_None); - __pyx_t_2 = (__pyx_t_5 != 0); - __pyx_t_3 = __pyx_t_2; - __pyx_L4_bool_binop_done:; - __pyx_v_use_setstate = __pyx_t_3; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state - * else: - */ - __pyx_t_3 = (__pyx_v_use_setstate != 0); - if (__pyx_t_3) { - - /* "(tree fragment)":13 - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None - * if use_setstate: - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_RawRequestMessage); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_21004882); - __Pyx_GIVEREF(__pyx_int_21004882); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_21004882); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); - __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); - __Pyx_INCREF(__pyx_v_state); - 
__Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.method is not None or self.path is not None or self.raw_headers is not None or self.should_close is not None or self.upgrade is not None or self.url is not None or self.version is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, None), state - * else: - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_RawRequestMessage); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_21004882); - __Pyx_GIVEREF(__pyx_int_21004882); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_21004882); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __pyx_t_6 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v_self), 
((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17RawRequestMessage_8__setstate_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":17 - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_RawRequestMessage, (type(self), 0x1408252, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_RawRequestMessage__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.RawRequestMessage.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":184 - * return ret - * - * cdef _new_request_message(str method, # <<<<<<<<<<<<<< - * str path, - * object version, - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser__new_request_message(PyObject *__pyx_v_method, PyObject *__pyx_v_path, PyObject *__pyx_v_version, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, int __pyx_v_should_close, PyObject *__pyx_v_compression, int __pyx_v_upgrade, int __pyx_v_chunked, PyObject *__pyx_v_url) { - struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v_ret = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_new_request_message", 0); - - /* "aiohttp/_http_parser.pyx":195 - * object url): - * cdef RawRequestMessage ret - * ret = RawRequestMessage.__new__(RawRequestMessage) # <<<<<<<<<<<<<< - * ret.method = method - * ret.path = path - */ - __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 195, __pyx_L1_error) - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":196 - * cdef RawRequestMessage ret - * ret = RawRequestMessage.__new__(RawRequestMessage) - * ret.method = method # <<<<<<<<<<<<<< - * ret.path = path - * ret.version = version - */ - 
__Pyx_INCREF(__pyx_v_method); - __Pyx_GIVEREF(__pyx_v_method); - __Pyx_GOTREF(__pyx_v_ret->method); - __Pyx_DECREF(__pyx_v_ret->method); - __pyx_v_ret->method = __pyx_v_method; - - /* "aiohttp/_http_parser.pyx":197 - * ret = RawRequestMessage.__new__(RawRequestMessage) - * ret.method = method - * ret.path = path # <<<<<<<<<<<<<< - * ret.version = version - * ret.headers = headers - */ - __Pyx_INCREF(__pyx_v_path); - __Pyx_GIVEREF(__pyx_v_path); - __Pyx_GOTREF(__pyx_v_ret->path); - __Pyx_DECREF(__pyx_v_ret->path); - __pyx_v_ret->path = __pyx_v_path; - - /* "aiohttp/_http_parser.pyx":198 - * ret.method = method - * ret.path = path - * ret.version = version # <<<<<<<<<<<<<< - * ret.headers = headers - * ret.raw_headers = raw_headers - */ - __Pyx_INCREF(__pyx_v_version); - __Pyx_GIVEREF(__pyx_v_version); - __Pyx_GOTREF(__pyx_v_ret->version); - __Pyx_DECREF(__pyx_v_ret->version); - __pyx_v_ret->version = __pyx_v_version; - - /* "aiohttp/_http_parser.pyx":199 - * ret.path = path - * ret.version = version - * ret.headers = headers # <<<<<<<<<<<<<< - * ret.raw_headers = raw_headers - * ret.should_close = should_close - */ - __Pyx_INCREF(__pyx_v_headers); - __Pyx_GIVEREF(__pyx_v_headers); - __Pyx_GOTREF(__pyx_v_ret->headers); - __Pyx_DECREF(__pyx_v_ret->headers); - __pyx_v_ret->headers = __pyx_v_headers; - - /* "aiohttp/_http_parser.pyx":200 - * ret.version = version - * ret.headers = headers - * ret.raw_headers = raw_headers # <<<<<<<<<<<<<< - * ret.should_close = should_close - * ret.compression = compression - */ - __Pyx_INCREF(__pyx_v_raw_headers); - __Pyx_GIVEREF(__pyx_v_raw_headers); - __Pyx_GOTREF(__pyx_v_ret->raw_headers); - __Pyx_DECREF(__pyx_v_ret->raw_headers); - __pyx_v_ret->raw_headers = __pyx_v_raw_headers; - - /* "aiohttp/_http_parser.pyx":201 - * ret.headers = headers - * ret.raw_headers = raw_headers - * ret.should_close = should_close # <<<<<<<<<<<<<< - * ret.compression = compression - * ret.upgrade = upgrade - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 201, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_ret->should_close); - __Pyx_DECREF(__pyx_v_ret->should_close); - __pyx_v_ret->should_close = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":202 - * ret.raw_headers = raw_headers - * ret.should_close = should_close - * ret.compression = compression # <<<<<<<<<<<<<< - * ret.upgrade = upgrade - * ret.chunked = chunked - */ - __Pyx_INCREF(__pyx_v_compression); - __Pyx_GIVEREF(__pyx_v_compression); - __Pyx_GOTREF(__pyx_v_ret->compression); - __Pyx_DECREF(__pyx_v_ret->compression); - __pyx_v_ret->compression = __pyx_v_compression; - - /* "aiohttp/_http_parser.pyx":203 - * ret.should_close = should_close - * ret.compression = compression - * ret.upgrade = upgrade # <<<<<<<<<<<<<< - * ret.chunked = chunked - * ret.url = url - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 203, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_ret->upgrade); - __Pyx_DECREF(__pyx_v_ret->upgrade); - __pyx_v_ret->upgrade = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":204 - * ret.compression = compression - * ret.upgrade = upgrade - * ret.chunked = chunked # <<<<<<<<<<<<<< - * ret.url = url - * return ret - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 204, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - 
__Pyx_GOTREF(__pyx_v_ret->chunked); - __Pyx_DECREF(__pyx_v_ret->chunked); - __pyx_v_ret->chunked = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":205 - * ret.upgrade = upgrade - * ret.chunked = chunked - * ret.url = url # <<<<<<<<<<<<<< - * return ret - * - */ - __Pyx_INCREF(__pyx_v_url); - __Pyx_GIVEREF(__pyx_v_url); - __Pyx_GOTREF(__pyx_v_ret->url); - __Pyx_DECREF(__pyx_v_ret->url); - __pyx_v_ret->url = __pyx_v_url; - - /* "aiohttp/_http_parser.pyx":206 - * ret.chunked = chunked - * ret.url = url - * return ret # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject *)__pyx_v_ret)); - __pyx_r = ((PyObject *)__pyx_v_ret); - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":184 - * return ret - * - * cdef _new_request_message(str method, # <<<<<<<<<<<<<< - * str path, - * object version, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser._new_request_message", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_ret); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":221 - * cdef readonly object chunked - * - * def __init__(self, version, code, reason, headers, raw_headers, # <<<<<<<<<<<<<< - * should_close, compression, upgrade, chunked): - * self.version = version - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_version = 0; - PyObject *__pyx_v_code = 0; - PyObject *__pyx_v_reason = 0; - PyObject *__pyx_v_headers = 0; - PyObject *__pyx_v_raw_headers = 0; - PyObject *__pyx_v_should_close = 0; - PyObject *__pyx_v_compression = 0; - PyObject *__pyx_v_upgrade = 0; - PyObject *__pyx_v_chunked = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_version,&__pyx_n_s_code,&__pyx_n_s_reason,&__pyx_n_s_headers,&__pyx_n_s_raw_headers,&__pyx_n_s_should_close,&__pyx_n_s_compression,&__pyx_n_s_upgrade,&__pyx_n_s_chunked,0}; - PyObject* values[9] = {0,0,0,0,0,0,0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_version)) 
!= 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_code)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 1); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_reason)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 2); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_headers)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 3); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 4: - if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_raw_headers)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 4); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 5: - if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_should_close)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 5); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 6: - if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compression)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 6); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 7: - if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_upgrade)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 7); __PYX_ERR(0, 221, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 8: - if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_chunked)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, 8); __PYX_ERR(0, 221, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 221, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 9) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - } - __pyx_v_version = values[0]; - __pyx_v_code = values[1]; - __pyx_v_reason = values[2]; - __pyx_v_headers = values[3]; - __pyx_v_raw_headers = values[4]; - __pyx_v_should_close = values[5]; - __pyx_v_compression = values[6]; - __pyx_v_upgrade = values[7]; - __pyx_v_chunked = values[8]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 9, 9, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 221, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self), __pyx_v_version, __pyx_v_code, __pyx_v_reason, __pyx_v_headers, 
__pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_compression, __pyx_v_upgrade, __pyx_v_chunked); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage___init__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self, PyObject *__pyx_v_version, PyObject *__pyx_v_code, PyObject *__pyx_v_reason, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, PyObject *__pyx_v_should_close, PyObject *__pyx_v_compression, PyObject *__pyx_v_upgrade, PyObject *__pyx_v_chunked) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "aiohttp/_http_parser.pyx":223 - * def __init__(self, version, code, reason, headers, raw_headers, - * should_close, compression, upgrade, chunked): - * self.version = version # <<<<<<<<<<<<<< - * self.code = code - * self.reason = reason - */ - __Pyx_INCREF(__pyx_v_version); - __Pyx_GIVEREF(__pyx_v_version); - __Pyx_GOTREF(__pyx_v_self->version); - __Pyx_DECREF(__pyx_v_self->version); - __pyx_v_self->version = __pyx_v_version; - - /* "aiohttp/_http_parser.pyx":224 - * should_close, compression, upgrade, chunked): - * self.version = version - * self.code = code # <<<<<<<<<<<<<< - * self.reason = reason - * self.headers = headers - */ - __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_code); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 224, __pyx_L1_error) - __pyx_v_self->code = __pyx_t_1; - - /* "aiohttp/_http_parser.pyx":225 - * self.version = version - * self.code = code - * self.reason = reason # <<<<<<<<<<<<<< - * self.headers = headers - * self.raw_headers = raw_headers - */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_reason))||((__pyx_v_reason) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_reason)->tp_name), 0))) __PYX_ERR(0, 225, __pyx_L1_error) - __pyx_t_2 = __pyx_v_reason; - __Pyx_INCREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->reason); - __Pyx_DECREF(__pyx_v_self->reason); - __pyx_v_self->reason = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":226 - * self.code = code - * self.reason = reason - * self.headers = headers # <<<<<<<<<<<<<< - * self.raw_headers = raw_headers - * self.should_close = should_close - */ - __Pyx_INCREF(__pyx_v_headers); - __Pyx_GIVEREF(__pyx_v_headers); - __Pyx_GOTREF(__pyx_v_self->headers); - __Pyx_DECREF(__pyx_v_self->headers); - __pyx_v_self->headers = __pyx_v_headers; - - /* "aiohttp/_http_parser.pyx":227 - * self.reason = reason - * self.headers = headers - * self.raw_headers = raw_headers # <<<<<<<<<<<<<< - * self.should_close = should_close - * self.compression = compression - */ - __Pyx_INCREF(__pyx_v_raw_headers); - __Pyx_GIVEREF(__pyx_v_raw_headers); - __Pyx_GOTREF(__pyx_v_self->raw_headers); - __Pyx_DECREF(__pyx_v_self->raw_headers); - __pyx_v_self->raw_headers = __pyx_v_raw_headers; - - /* "aiohttp/_http_parser.pyx":228 - * self.headers = headers - * self.raw_headers = raw_headers - * self.should_close = should_close # <<<<<<<<<<<<<< - * self.compression = compression - * self.upgrade = upgrade - */ - __Pyx_INCREF(__pyx_v_should_close); - __Pyx_GIVEREF(__pyx_v_should_close); - __Pyx_GOTREF(__pyx_v_self->should_close); - __Pyx_DECREF(__pyx_v_self->should_close); - __pyx_v_self->should_close = __pyx_v_should_close; - - 
/* "aiohttp/_http_parser.pyx":229 - * self.raw_headers = raw_headers - * self.should_close = should_close - * self.compression = compression # <<<<<<<<<<<<<< - * self.upgrade = upgrade - * self.chunked = chunked - */ - __Pyx_INCREF(__pyx_v_compression); - __Pyx_GIVEREF(__pyx_v_compression); - __Pyx_GOTREF(__pyx_v_self->compression); - __Pyx_DECREF(__pyx_v_self->compression); - __pyx_v_self->compression = __pyx_v_compression; - - /* "aiohttp/_http_parser.pyx":230 - * self.should_close = should_close - * self.compression = compression - * self.upgrade = upgrade # <<<<<<<<<<<<<< - * self.chunked = chunked - * - */ - __Pyx_INCREF(__pyx_v_upgrade); - __Pyx_GIVEREF(__pyx_v_upgrade); - __Pyx_GOTREF(__pyx_v_self->upgrade); - __Pyx_DECREF(__pyx_v_self->upgrade); - __pyx_v_self->upgrade = __pyx_v_upgrade; - - /* "aiohttp/_http_parser.pyx":231 - * self.compression = compression - * self.upgrade = upgrade - * self.chunked = chunked # <<<<<<<<<<<<<< - * - * def __repr__(self): - */ - __Pyx_INCREF(__pyx_v_chunked); - __Pyx_GIVEREF(__pyx_v_chunked); - __Pyx_GOTREF(__pyx_v_self->chunked); - __Pyx_DECREF(__pyx_v_self->chunked); - __pyx_v_self->chunked = __pyx_v_chunked; - - /* "aiohttp/_http_parser.pyx":221 - * cdef readonly object chunked - * - * def __init__(self, version, code, reason, headers, raw_headers, # <<<<<<<<<<<<<< - * should_close, compression, upgrade, chunked): - * self.version = version - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":233 - * self.chunked = chunked - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("version", self.version)) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ - -/* "aiohttp/_http_parser.pyx":244 - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< - * return '' - * - */ - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___genexpr(PyObject *__pyx_self) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *__pyx_cur_scope; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("genexpr", 0); - __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr 
*)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, __pyx_empty_tuple, NULL); - if (unlikely(!__pyx_cur_scope)) { - __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)Py_None); - __Pyx_INCREF(Py_None); - __PYX_ERR(0, 244, __pyx_L1_error) - } else { - __Pyx_GOTREF(__pyx_cur_scope); - } - __pyx_cur_scope->__pyx_outer_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *) __pyx_self; - __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_outer_scope)); - __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope); - { - __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_repr___locals_genexpr, __pyx_n_s_aiohttp__http_parser); if (unlikely(!gen)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_DECREF(__pyx_cur_scope); - __Pyx_RefNannyFinishContext(); - return (PyObject *) gen; - } - - /* function exit code */ - __pyx_L1_error:; - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__repr__.genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_gb_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___2generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ -{ - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *__pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)__pyx_generator->closure); - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *(*__pyx_t_7)(PyObject *); - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("genexpr", 0); - switch (__pyx_generator->resume_label) { - case 0: goto __pyx_L3_first_run; - default: /* CPython raises the right error here */ - __Pyx_RefNannyFinishContext(); - return NULL; - } - __pyx_L3_first_run:; - if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 244, __pyx_L1_error) - __pyx_r = PyList_New(0); if (unlikely(!__pyx_r)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_r); - if (unlikely(!__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info)) { __Pyx_RaiseClosureNameError("info"); __PYX_ERR(0, 244, __pyx_L1_error) } - if (unlikely(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_info == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 244, __pyx_L1_error) - } - __pyx_t_1 = __pyx_cur_scope->__pyx_outer_scope->__pyx_v_info; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; - for (;;) { - if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 244, __pyx_L1_error) - #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - if ((likely(PyTuple_CheckExact(__pyx_t_3))) || 
(PyList_CheckExact(__pyx_t_3))) { - PyObject* sequence = __pyx_t_3; - Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); - if (unlikely(size != 2)) { - if (size > 2) __Pyx_RaiseTooManyValuesError(2); - else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); - __PYX_ERR(0, 244, __pyx_L1_error) - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - if (likely(PyTuple_CheckExact(sequence))) { - __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); - __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); - } else { - __pyx_t_4 = PyList_GET_ITEM(sequence, 0); - __pyx_t_5 = PyList_GET_ITEM(sequence, 1); - } - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - #else - __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } else { - Py_ssize_t index = -1; - __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; - index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L6_unpacking_failed; - __Pyx_GOTREF(__pyx_t_4); - index = 1; __pyx_t_5 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_5)) goto __pyx_L6_unpacking_failed; - __Pyx_GOTREF(__pyx_t_5); - if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) __PYX_ERR(0, 244, __pyx_L1_error) - __pyx_t_7 = NULL; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - goto __pyx_L7_unpacking_done; - __pyx_L6_unpacking_failed:; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_7 = NULL; - if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); - __PYX_ERR(0, 244, __pyx_L1_error) - __pyx_L7_unpacking_done:; - } - __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_name); - __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_name, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __pyx_t_4 = 0; - __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_val); - __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_val, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __pyx_t_5 = 0; - __pyx_t_3 = PyNumber_Add(__pyx_cur_scope->__pyx_v_name, __pyx_kp_u_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = PyObject_Repr(__pyx_cur_scope->__pyx_v_val); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = PyNumber_Add(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__Pyx_ListComp_Append(__pyx_r, (PyObject*)__pyx_t_4))) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_r); __pyx_r = 0; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - #if !CYTHON_USE_EXC_INFO_STACK - __Pyx_Coroutine_ResetAndClearException(__pyx_generator); - #endif - __pyx_generator->resume_label = -1; - __Pyx_Coroutine_clear((PyObject*)__pyx_generator); - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":233 - * self.chunked = chunked - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("version", self.version)) - */ - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_2__repr__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *__pyx_cur_scope; - PyObject *__pyx_v_sinfo = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__repr__", 0); - __pyx_cur_scope = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(__pyx_ptype_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, __pyx_empty_tuple, NULL); - if (unlikely(!__pyx_cur_scope)) { - __pyx_cur_scope = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)Py_None); - __Pyx_INCREF(Py_None); - __PYX_ERR(0, 233, __pyx_L1_error) - } else { - __Pyx_GOTREF(__pyx_cur_scope); - } - - /* "aiohttp/_http_parser.pyx":234 - * - * def __repr__(self): - * info = [] # <<<<<<<<<<<<<< - * info.append(("version", self.version)) - * info.append(("code", self.code)) - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_cur_scope->__pyx_v_info = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":235 - * def __repr__(self): - * info = [] - * info.append(("version", self.version)) # <<<<<<<<<<<<<< - * info.append(("code", self.code)) - * info.append(("reason", self.reason)) - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_u_version); - __Pyx_GIVEREF(__pyx_n_u_version); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_n_u_version); - __Pyx_INCREF(__pyx_v_self->version); - __Pyx_GIVEREF(__pyx_v_self->version); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_self->version); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":236 - * info = [] - * info.append(("version", self.version)) - * info.append(("code", self.code)) # <<<<<<<<<<<<<< - * info.append(("reason", self.reason)) - * info.append(("headers", self.headers)) - */ - __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 236, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 236, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_code); - __Pyx_GIVEREF(__pyx_n_u_code); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_code); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 236, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":237 - * info.append(("version", self.version)) - * info.append(("code", self.code)) - * info.append(("reason", self.reason)) # <<<<<<<<<<<<<< - * 
info.append(("headers", self.headers)) - * info.append(("raw_headers", self.raw_headers)) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 237, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_reason); - __Pyx_GIVEREF(__pyx_n_u_reason); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_reason); - __Pyx_INCREF(__pyx_v_self->reason); - __Pyx_GIVEREF(__pyx_v_self->reason); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->reason); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 237, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":238 - * info.append(("code", self.code)) - * info.append(("reason", self.reason)) - * info.append(("headers", self.headers)) # <<<<<<<<<<<<<< - * info.append(("raw_headers", self.raw_headers)) - * info.append(("should_close", self.should_close)) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 238, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_headers); - __Pyx_GIVEREF(__pyx_n_u_headers); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_headers); - __Pyx_INCREF(__pyx_v_self->headers); - __Pyx_GIVEREF(__pyx_v_self->headers); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->headers); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 238, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":239 - * info.append(("reason", self.reason)) - * info.append(("headers", self.headers)) - * info.append(("raw_headers", self.raw_headers)) # <<<<<<<<<<<<<< - * info.append(("should_close", self.should_close)) - * info.append(("compression", self.compression)) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 239, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_raw_headers); - __Pyx_GIVEREF(__pyx_n_u_raw_headers); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_raw_headers); - __Pyx_INCREF(__pyx_v_self->raw_headers); - __Pyx_GIVEREF(__pyx_v_self->raw_headers); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->raw_headers); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 239, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":240 - * info.append(("headers", self.headers)) - * info.append(("raw_headers", self.raw_headers)) - * info.append(("should_close", self.should_close)) # <<<<<<<<<<<<<< - * info.append(("compression", self.compression)) - * info.append(("upgrade", self.upgrade)) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_should_close); - __Pyx_GIVEREF(__pyx_n_u_should_close); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_should_close); - __Pyx_INCREF(__pyx_v_self->should_close); - __Pyx_GIVEREF(__pyx_v_self->should_close); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->should_close); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 240, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":241 - * info.append(("raw_headers", self.raw_headers)) - * info.append(("should_close", self.should_close)) - * info.append(("compression", self.compression)) # <<<<<<<<<<<<<< - * info.append(("upgrade", self.upgrade)) - 
* info.append(("chunked", self.chunked)) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_compression); - __Pyx_GIVEREF(__pyx_n_u_compression); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_compression); - __Pyx_INCREF(__pyx_v_self->compression); - __Pyx_GIVEREF(__pyx_v_self->compression); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->compression); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 241, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":242 - * info.append(("should_close", self.should_close)) - * info.append(("compression", self.compression)) - * info.append(("upgrade", self.upgrade)) # <<<<<<<<<<<<<< - * info.append(("chunked", self.chunked)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 242, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_upgrade); - __Pyx_GIVEREF(__pyx_n_u_upgrade); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_upgrade); - __Pyx_INCREF(__pyx_v_self->upgrade); - __Pyx_GIVEREF(__pyx_v_self->upgrade); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->upgrade); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 242, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":243 - * info.append(("compression", self.compression)) - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) # <<<<<<<<<<<<<< - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - * return '' - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 243, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_u_chunked); - __Pyx_GIVEREF(__pyx_n_u_chunked); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_n_u_chunked); - __Pyx_INCREF(__pyx_v_self->chunked); - __Pyx_GIVEREF(__pyx_v_self->chunked); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_self->chunked); - __pyx_t_2 = __Pyx_PyList_Append(__pyx_cur_scope->__pyx_v_info, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 243, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":244 - * info.append(("upgrade", self.upgrade)) - * info.append(("chunked", self.chunked)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) # <<<<<<<<<<<<<< - * return '' - * - */ - __pyx_t_3 = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_8__repr___genexpr(((PyObject*)__pyx_cur_scope)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_Generator_Next(__pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyUnicode_Join(__pyx_kp_u__2, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sinfo = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":245 - * info.append(("chunked", self.chunked)) - * sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - * return '' # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_PyUnicode_ConcatSafe(__pyx_kp_u_RawResponseMessage, __pyx_v_sinfo); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 245, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyUnicode_Concat(__pyx_t_3, __pyx_kp_u__3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 245, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":233 - * self.chunked = chunked - * - * def __repr__(self): # <<<<<<<<<<<<<< - * info = [] - * info.append(("version", self.version)) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_sinfo); - __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":211 - * @cython.freelist(DEFAULT_FREELIST_SIZE) - * cdef class RawResponseMessage: - * cdef readonly object version # HttpVersion # <<<<<<<<<<<<<< - * cdef readonly int code - * cdef readonly str reason - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7version_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7version_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7version___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7version___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->version); - __pyx_r = __pyx_v_self->version; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":212 - * cdef class RawResponseMessage: - * cdef readonly object version # HttpVersion - * cdef readonly int code # <<<<<<<<<<<<<< - * cdef readonly str reason - * cdef readonly object headers # CIMultiDict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_4code_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_4code_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4code___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4code___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - 
__pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 212, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.code.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":213 - * cdef readonly object version # HttpVersion - * cdef readonly int code - * cdef readonly str reason # <<<<<<<<<<<<<< - * cdef readonly object headers # CIMultiDict - * cdef readonly object raw_headers # tuple - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_6reason_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_6reason_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6reason___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6reason___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->reason); - __pyx_r = __pyx_v_self->reason; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":214 - * cdef readonly int code - * cdef readonly str reason - * cdef readonly object headers # CIMultiDict # <<<<<<<<<<<<<< - * cdef readonly object raw_headers # tuple - * cdef readonly object should_close - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7headers_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7headers_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->headers); - __pyx_r = __pyx_v_self->headers; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":215 - * cdef readonly str reason - * cdef readonly object headers # CIMultiDict - * cdef readonly object raw_headers # tuple # <<<<<<<<<<<<<< - * cdef readonly object should_close - * cdef readonly object compression - */ - -/* Python wrapper */ -static PyObject 
*__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->raw_headers); - __pyx_r = __pyx_v_self->raw_headers; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":216 - * cdef readonly object headers # CIMultiDict - * cdef readonly object raw_headers # tuple - * cdef readonly object should_close # <<<<<<<<<<<<<< - * cdef readonly object compression - * cdef readonly object upgrade - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_12should_close_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_12should_close_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_12should_close___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_12should_close___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->should_close); - __pyx_r = __pyx_v_self->should_close; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":217 - * cdef readonly object raw_headers # tuple - * cdef readonly object should_close - * cdef readonly object compression # <<<<<<<<<<<<<< - * cdef readonly object upgrade - * cdef readonly object chunked - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11compression_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11compression_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11compression___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_11compression___get__(struct 
__pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->compression); - __pyx_r = __pyx_v_self->compression; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":218 - * cdef readonly object should_close - * cdef readonly object compression - * cdef readonly object upgrade # <<<<<<<<<<<<<< - * cdef readonly object chunked - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->upgrade); - __pyx_r = __pyx_v_self->upgrade; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":219 - * cdef readonly object compression - * cdef readonly object upgrade - * cdef readonly object chunked # <<<<<<<<<<<<<< - * - * def __init__(self, version, code, reason, headers, raw_headers, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7chunked_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7chunked_1__get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7chunked___get__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_7chunked___get__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 0); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->chunked); - __pyx_r = __pyx_v_self->chunked; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED 
PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_4__reduce_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->code); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(9); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v_self->chunked); - __Pyx_GIVEREF(__pyx_v_self->chunked); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->chunked); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_1); - __Pyx_INCREF(__pyx_v_self->compression); - __Pyx_GIVEREF(__pyx_v_self->compression); - PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_self->compression); - __Pyx_INCREF(__pyx_v_self->headers); - __Pyx_GIVEREF(__pyx_v_self->headers); - PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_self->headers); - __Pyx_INCREF(__pyx_v_self->raw_headers); - __Pyx_GIVEREF(__pyx_v_self->raw_headers); - PyTuple_SET_ITEM(__pyx_t_2, 4, __pyx_v_self->raw_headers); - __Pyx_INCREF(__pyx_v_self->reason); - __Pyx_GIVEREF(__pyx_v_self->reason); - PyTuple_SET_ITEM(__pyx_t_2, 5, __pyx_v_self->reason); - __Pyx_INCREF(__pyx_v_self->should_close); - __Pyx_GIVEREF(__pyx_v_self->should_close); - PyTuple_SET_ITEM(__pyx_t_2, 6, __pyx_v_self->should_close); - __Pyx_INCREF(__pyx_v_self->upgrade); - __Pyx_GIVEREF(__pyx_v_self->upgrade); - PyTuple_SET_ITEM(__pyx_t_2, 7, __pyx_v_self->upgrade); - __Pyx_INCREF(__pyx_v_self->version); - __Pyx_GIVEREF(__pyx_v_self->version); - PyTuple_SET_ITEM(__pyx_t_2, 8, __pyx_v_self->version); - __pyx_t_1 = 0; - __pyx_v_state = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v__dict = __pyx_t_2; - __pyx_t_2 = 0; - - /* "(tree fragment)":7 - * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) - * _dict = 
getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_3 = (__pyx_v__dict != Py_None); - __pyx_t_4 = (__pyx_t_3 != 0); - if (__pyx_t_4) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); - __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); - __pyx_t_1 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self.chunked, self.code, self.compression, self.headers, self.raw_headers, self.reason, self.should_close, self.upgrade, self.version) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state - */ - /*else*/ { - __pyx_t_3 = (__pyx_v_self->chunked != Py_None); - __pyx_t_5 = (__pyx_t_3 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_4 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->compression != Py_None); - __pyx_t_3 = (__pyx_t_5 != 0); - if (!__pyx_t_3) { - } else { - __pyx_t_4 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = (__pyx_v_self->headers != Py_None); - __pyx_t_5 = (__pyx_t_3 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_4 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->raw_headers != Py_None); - __pyx_t_3 = (__pyx_t_5 != 0); - if (!__pyx_t_3) { - } else { - __pyx_t_4 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = (__pyx_v_self->reason != ((PyObject*)Py_None)); - __pyx_t_5 = (__pyx_t_3 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_4 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->should_close != Py_None); - __pyx_t_3 = (__pyx_t_5 != 0); - if (!__pyx_t_3) { - } else { - __pyx_t_4 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = (__pyx_v_self->upgrade != Py_None); - __pyx_t_5 = (__pyx_t_3 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_4 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->version != Py_None); - __pyx_t_3 = (__pyx_t_5 != 0); - __pyx_t_4 = __pyx_t_3; - __pyx_L4_bool_binop_done:; - __pyx_v_use_setstate = __pyx_t_4; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * 
else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state - * else: - */ - __pyx_t_4 = (__pyx_v_use_setstate != 0); - if (__pyx_t_4) { - - /* "(tree fragment)":13 - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None - * if use_setstate: - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle_RawResponseMessag); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_209127132); - __Pyx_GIVEREF(__pyx_int_209127132); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_209127132); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); - __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_2); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.chunked is not None or self.compression is not None or self.headers is not None or self.raw_headers is not None or self.reason is not None or self.should_close is not None or self.upgrade is not None or self.version is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, None), state - * else: - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle_RawResponseMessag); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_209127132); - __Pyx_GIVEREF(__pyx_int_209127132); - 
PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_209127132); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); - __pyx_t_6 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18RawResponseMessage_6__setstate_cython__(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":17 - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_RawResponseMessage, (type(self), 0xc7706dc, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_RawResponseMessage__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = 
Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.RawResponseMessage.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":248 - * - * - * cdef _new_response_message(object version, # <<<<<<<<<<<<<< - * int code, - * str reason, - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser__new_response_message(PyObject *__pyx_v_version, int __pyx_v_code, PyObject *__pyx_v_reason, PyObject *__pyx_v_headers, PyObject *__pyx_v_raw_headers, int __pyx_v_should_close, PyObject *__pyx_v_compression, int __pyx_v_upgrade, int __pyx_v_chunked) { - struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v_ret = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_new_response_message", 0); - - /* "aiohttp/_http_parser.pyx":258 - * bint chunked): - * cdef RawResponseMessage ret - * ret = RawResponseMessage.__new__(RawResponseMessage) # <<<<<<<<<<<<<< - * ret.version = version - * ret.code = code - */ - __pyx_t_1 = ((PyObject *)__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(((PyTypeObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 258, __pyx_L1_error) - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __pyx_v_ret = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":259 - * cdef RawResponseMessage ret - * ret = RawResponseMessage.__new__(RawResponseMessage) - * ret.version = version # <<<<<<<<<<<<<< - * ret.code = code - * ret.reason = reason - */ - __Pyx_INCREF(__pyx_v_version); - __Pyx_GIVEREF(__pyx_v_version); - __Pyx_GOTREF(__pyx_v_ret->version); - __Pyx_DECREF(__pyx_v_ret->version); - __pyx_v_ret->version = __pyx_v_version; - - /* "aiohttp/_http_parser.pyx":260 - * ret = RawResponseMessage.__new__(RawResponseMessage) - * ret.version = version - * ret.code = code # <<<<<<<<<<<<<< - * ret.reason = reason - * ret.headers = headers - */ - __pyx_v_ret->code = __pyx_v_code; - - /* "aiohttp/_http_parser.pyx":261 - * ret.version = version - * ret.code = code - * ret.reason = reason # <<<<<<<<<<<<<< - * ret.headers = headers - * ret.raw_headers = raw_headers - */ - __Pyx_INCREF(__pyx_v_reason); - __Pyx_GIVEREF(__pyx_v_reason); - __Pyx_GOTREF(__pyx_v_ret->reason); - __Pyx_DECREF(__pyx_v_ret->reason); - __pyx_v_ret->reason = __pyx_v_reason; - - /* "aiohttp/_http_parser.pyx":262 - * ret.code = code - * ret.reason = reason - * ret.headers = headers # <<<<<<<<<<<<<< - * ret.raw_headers = raw_headers - * ret.should_close = should_close - */ - __Pyx_INCREF(__pyx_v_headers); - __Pyx_GIVEREF(__pyx_v_headers); - __Pyx_GOTREF(__pyx_v_ret->headers); - __Pyx_DECREF(__pyx_v_ret->headers); - __pyx_v_ret->headers = __pyx_v_headers; - - /* "aiohttp/_http_parser.pyx":263 - * ret.reason = reason - * ret.headers = headers - * ret.raw_headers = raw_headers # <<<<<<<<<<<<<< - * ret.should_close = should_close - * ret.compression = compression - */ - __Pyx_INCREF(__pyx_v_raw_headers); - __Pyx_GIVEREF(__pyx_v_raw_headers); - __Pyx_GOTREF(__pyx_v_ret->raw_headers); - __Pyx_DECREF(__pyx_v_ret->raw_headers); - __pyx_v_ret->raw_headers = __pyx_v_raw_headers; - - /* 
"aiohttp/_http_parser.pyx":264 - * ret.headers = headers - * ret.raw_headers = raw_headers - * ret.should_close = should_close # <<<<<<<<<<<<<< - * ret.compression = compression - * ret.upgrade = upgrade - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 264, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_ret->should_close); - __Pyx_DECREF(__pyx_v_ret->should_close); - __pyx_v_ret->should_close = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":265 - * ret.raw_headers = raw_headers - * ret.should_close = should_close - * ret.compression = compression # <<<<<<<<<<<<<< - * ret.upgrade = upgrade - * ret.chunked = chunked - */ - __Pyx_INCREF(__pyx_v_compression); - __Pyx_GIVEREF(__pyx_v_compression); - __Pyx_GOTREF(__pyx_v_ret->compression); - __Pyx_DECREF(__pyx_v_ret->compression); - __pyx_v_ret->compression = __pyx_v_compression; - - /* "aiohttp/_http_parser.pyx":266 - * ret.should_close = should_close - * ret.compression = compression - * ret.upgrade = upgrade # <<<<<<<<<<<<<< - * ret.chunked = chunked - * return ret - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 266, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_ret->upgrade); - __Pyx_DECREF(__pyx_v_ret->upgrade); - __pyx_v_ret->upgrade = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":267 - * ret.compression = compression - * ret.upgrade = upgrade - * ret.chunked = chunked # <<<<<<<<<<<<<< - * return ret - * - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_chunked); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 267, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_ret->chunked); - __Pyx_DECREF(__pyx_v_ret->chunked); - __pyx_v_ret->chunked = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":268 - * ret.upgrade = upgrade - * ret.chunked = chunked - * return ret # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject *)__pyx_v_ret)); - __pyx_r = ((PyObject *)__pyx_v_ret); - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":248 - * - * - * cdef _new_response_message(object version, # <<<<<<<<<<<<<< - * int code, - * str reason, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser._new_response_message", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_ret); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":312 - * Py_buffer py_buf - * - * def __cinit__(self): # <<<<<<<<<<<<<< - * self._cparser = \ - * PyMem_Malloc(sizeof(cparser.http_parser)) - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); - if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { - __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} - if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; - __pyx_r = 
__pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__cinit__", 0); - - /* "aiohttp/_http_parser.pyx":313 - * - * def __cinit__(self): - * self._cparser = \ # <<<<<<<<<<<<<< - * PyMem_Malloc(sizeof(cparser.http_parser)) - * if self._cparser is NULL: - */ - __pyx_v_self->_cparser = ((struct http_parser *)PyMem_Malloc((sizeof(struct http_parser)))); - - /* "aiohttp/_http_parser.pyx":315 - * self._cparser = \ - * PyMem_Malloc(sizeof(cparser.http_parser)) - * if self._cparser is NULL: # <<<<<<<<<<<<<< - * raise MemoryError() - * - */ - __pyx_t_1 = ((__pyx_v_self->_cparser == NULL) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_parser.pyx":316 - * PyMem_Malloc(sizeof(cparser.http_parser)) - * if self._cparser is NULL: - * raise MemoryError() # <<<<<<<<<<<<<< - * - * self._csettings = \ - */ - PyErr_NoMemory(); __PYX_ERR(0, 316, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":315 - * self._cparser = \ - * PyMem_Malloc(sizeof(cparser.http_parser)) - * if self._cparser is NULL: # <<<<<<<<<<<<<< - * raise MemoryError() - * - */ - } - - /* "aiohttp/_http_parser.pyx":318 - * raise MemoryError() - * - * self._csettings = \ # <<<<<<<<<<<<<< - * PyMem_Malloc(sizeof(cparser.http_parser_settings)) - * if self._csettings is NULL: - */ - __pyx_v_self->_csettings = ((struct http_parser_settings *)PyMem_Malloc((sizeof(struct http_parser_settings)))); - - /* "aiohttp/_http_parser.pyx":320 - * self._csettings = \ - * PyMem_Malloc(sizeof(cparser.http_parser_settings)) - * if self._csettings is NULL: # <<<<<<<<<<<<<< - * raise MemoryError() - * - */ - __pyx_t_1 = ((__pyx_v_self->_csettings == NULL) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_parser.pyx":321 - * PyMem_Malloc(sizeof(cparser.http_parser_settings)) - * if self._csettings is NULL: - * raise MemoryError() # <<<<<<<<<<<<<< - * - * def __dealloc__(self): - */ - PyErr_NoMemory(); __PYX_ERR(0, 321, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":320 - * self._csettings = \ - * PyMem_Malloc(sizeof(cparser.http_parser_settings)) - * if self._csettings is NULL: # <<<<<<<<<<<<<< - * raise MemoryError() - * - */ - } - - /* "aiohttp/_http_parser.pyx":312 - * Py_buffer py_buf - * - * def __cinit__(self): # <<<<<<<<<<<<<< - * self._cparser = \ - * PyMem_Malloc(sizeof(cparser.http_parser)) - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":323 - * raise MemoryError() - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * PyMem_Free(self._cparser) - * PyMem_Free(self._csettings) - */ - -/* Python wrapper */ -static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ -static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); - 
__pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__dealloc__", 0); - - /* "aiohttp/_http_parser.pyx":324 - * - * def __dealloc__(self): - * PyMem_Free(self._cparser) # <<<<<<<<<<<<<< - * PyMem_Free(self._csettings) - * - */ - PyMem_Free(__pyx_v_self->_cparser); - - /* "aiohttp/_http_parser.pyx":325 - * def __dealloc__(self): - * PyMem_Free(self._cparser) - * PyMem_Free(self._csettings) # <<<<<<<<<<<<<< - * - * cdef _init(self, cparser.http_parser_type mode, - */ - PyMem_Free(__pyx_v_self->_csettings); - - /* "aiohttp/_http_parser.pyx":323 - * raise MemoryError() - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * PyMem_Free(self._cparser) - * PyMem_Free(self._csettings) - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "aiohttp/_http_parser.pyx":327 - * PyMem_Free(self._csettings) - * - * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< - * object protocol, object loop, int limit, - * object timer=None, - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum http_parser_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, int __pyx_v_limit, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args) { - - /* "aiohttp/_http_parser.pyx":329 - * cdef _init(self, cparser.http_parser_type mode, - * object protocol, object loop, int limit, - * object timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - PyObject *__pyx_v_timer = ((PyObject *)Py_None); - size_t __pyx_v_max_line_size = ((size_t)0x1FFE); - size_t __pyx_v_max_headers = ((size_t)0x8000); - size_t __pyx_v_max_field_size = ((size_t)0x1FFE); - - /* "aiohttp/_http_parser.pyx":331 - * object timer=None, - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< - * bint response_with_body=True, bint read_until_eof=False, - * bint auto_decompress=True): - */ - PyObject *__pyx_v_payload_exception = ((PyObject *)Py_None); - - /* "aiohttp/_http_parser.pyx":332 - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< - * bint auto_decompress=True): - * cparser.http_parser_init(self._cparser, mode) - */ - int __pyx_v_response_with_body = ((int)1); - int __pyx_v_read_until_eof = ((int)0); - - /* "aiohttp/_http_parser.pyx":333 - * size_t max_field_size=8190, payload_exception=None, - * bint response_with_body=True, bint read_until_eof=False, - * bint auto_decompress=True): # <<<<<<<<<<<<<< - * cparser.http_parser_init(self._cparser, mode) - * self._cparser.data = self - */ - int __pyx_v_auto_decompress = ((int)1); - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_init", 0); - if (__pyx_optional_args) { - if (__pyx_optional_args->__pyx_n > 0) { - __pyx_v_timer = 
__pyx_optional_args->timer; - if (__pyx_optional_args->__pyx_n > 1) { - __pyx_v_max_line_size = __pyx_optional_args->max_line_size; - if (__pyx_optional_args->__pyx_n > 2) { - __pyx_v_max_headers = __pyx_optional_args->max_headers; - if (__pyx_optional_args->__pyx_n > 3) { - __pyx_v_max_field_size = __pyx_optional_args->max_field_size; - if (__pyx_optional_args->__pyx_n > 4) { - __pyx_v_payload_exception = __pyx_optional_args->payload_exception; - if (__pyx_optional_args->__pyx_n > 5) { - __pyx_v_response_with_body = __pyx_optional_args->response_with_body; - if (__pyx_optional_args->__pyx_n > 6) { - __pyx_v_read_until_eof = __pyx_optional_args->read_until_eof; - if (__pyx_optional_args->__pyx_n > 7) { - __pyx_v_auto_decompress = __pyx_optional_args->auto_decompress; - } - } - } - } - } - } - } - } - } - - /* "aiohttp/_http_parser.pyx":334 - * bint response_with_body=True, bint read_until_eof=False, - * bint auto_decompress=True): - * cparser.http_parser_init(self._cparser, mode) # <<<<<<<<<<<<<< - * self._cparser.data = self - * self._cparser.content_length = 0 - */ - http_parser_init(__pyx_v_self->_cparser, __pyx_v_mode); - - /* "aiohttp/_http_parser.pyx":335 - * bint auto_decompress=True): - * cparser.http_parser_init(self._cparser, mode) - * self._cparser.data = self # <<<<<<<<<<<<<< - * self._cparser.content_length = 0 - * - */ - __pyx_v_self->_cparser->data = ((void *)__pyx_v_self); - - /* "aiohttp/_http_parser.pyx":336 - * cparser.http_parser_init(self._cparser, mode) - * self._cparser.data = self - * self._cparser.content_length = 0 # <<<<<<<<<<<<<< - * - * cparser.http_parser_settings_init(self._csettings) - */ - __pyx_v_self->_cparser->content_length = 0; - - /* "aiohttp/_http_parser.pyx":338 - * self._cparser.content_length = 0 - * - * cparser.http_parser_settings_init(self._csettings) # <<<<<<<<<<<<<< - * - * self._protocol = protocol - */ - http_parser_settings_init(__pyx_v_self->_csettings); - - /* "aiohttp/_http_parser.pyx":340 - * cparser.http_parser_settings_init(self._csettings) - * - * self._protocol = protocol # <<<<<<<<<<<<<< - * self._loop = loop - * self._timer = timer - */ - __Pyx_INCREF(__pyx_v_protocol); - __Pyx_GIVEREF(__pyx_v_protocol); - __Pyx_GOTREF(__pyx_v_self->_protocol); - __Pyx_DECREF(__pyx_v_self->_protocol); - __pyx_v_self->_protocol = __pyx_v_protocol; - - /* "aiohttp/_http_parser.pyx":341 - * - * self._protocol = protocol - * self._loop = loop # <<<<<<<<<<<<<< - * self._timer = timer - * - */ - __Pyx_INCREF(__pyx_v_loop); - __Pyx_GIVEREF(__pyx_v_loop); - __Pyx_GOTREF(__pyx_v_self->_loop); - __Pyx_DECREF(__pyx_v_self->_loop); - __pyx_v_self->_loop = __pyx_v_loop; - - /* "aiohttp/_http_parser.pyx":342 - * self._protocol = protocol - * self._loop = loop - * self._timer = timer # <<<<<<<<<<<<<< - * - * self._buf = bytearray() - */ - __Pyx_INCREF(__pyx_v_timer); - __Pyx_GIVEREF(__pyx_v_timer); - __Pyx_GOTREF(__pyx_v_self->_timer); - __Pyx_DECREF(__pyx_v_self->_timer); - __pyx_v_self->_timer = __pyx_v_timer; - - /* "aiohttp/_http_parser.pyx":344 - * self._timer = timer - * - * self._buf = bytearray() # <<<<<<<<<<<<<< - * self._payload = None - * self._payload_error = 0 - */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_buf); - __Pyx_DECREF(__pyx_v_self->_buf); - __pyx_v_self->_buf = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":345 - * - * self._buf = 
bytearray() - * self._payload = None # <<<<<<<<<<<<<< - * self._payload_error = 0 - * self._payload_exception = payload_exception - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_payload); - __Pyx_DECREF(__pyx_v_self->_payload); - __pyx_v_self->_payload = Py_None; - - /* "aiohttp/_http_parser.pyx":346 - * self._buf = bytearray() - * self._payload = None - * self._payload_error = 0 # <<<<<<<<<<<<<< - * self._payload_exception = payload_exception - * self._messages = [] - */ - __pyx_v_self->_payload_error = 0; - - /* "aiohttp/_http_parser.pyx":347 - * self._payload = None - * self._payload_error = 0 - * self._payload_exception = payload_exception # <<<<<<<<<<<<<< - * self._messages = [] - * - */ - __Pyx_INCREF(__pyx_v_payload_exception); - __Pyx_GIVEREF(__pyx_v_payload_exception); - __Pyx_GOTREF(__pyx_v_self->_payload_exception); - __Pyx_DECREF(__pyx_v_self->_payload_exception); - __pyx_v_self->_payload_exception = __pyx_v_payload_exception; - - /* "aiohttp/_http_parser.pyx":348 - * self._payload_error = 0 - * self._payload_exception = payload_exception - * self._messages = [] # <<<<<<<<<<<<<< - * - * self._raw_name = bytearray() - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 348, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_messages); - __Pyx_DECREF(__pyx_v_self->_messages); - __pyx_v_self->_messages = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":350 - * self._messages = [] - * - * self._raw_name = bytearray() # <<<<<<<<<<<<<< - * self._raw_value = bytearray() - * self._has_value = False - */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 350, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_raw_name); - __Pyx_DECREF(__pyx_v_self->_raw_name); - __pyx_v_self->_raw_name = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":351 - * - * self._raw_name = bytearray() - * self._raw_value = bytearray() # <<<<<<<<<<<<<< - * self._has_value = False - * - */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)(&PyByteArray_Type))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 351, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_raw_value); - __Pyx_DECREF(__pyx_v_self->_raw_value); - __pyx_v_self->_raw_value = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":352 - * self._raw_name = bytearray() - * self._raw_value = bytearray() - * self._has_value = False # <<<<<<<<<<<<<< - * - * self._max_line_size = max_line_size - */ - __pyx_v_self->_has_value = 0; - - /* "aiohttp/_http_parser.pyx":354 - * self._has_value = False - * - * self._max_line_size = max_line_size # <<<<<<<<<<<<<< - * self._max_headers = max_headers - * self._max_field_size = max_field_size - */ - __pyx_v_self->_max_line_size = __pyx_v_max_line_size; - - /* "aiohttp/_http_parser.pyx":355 - * - * self._max_line_size = max_line_size - * self._max_headers = max_headers # <<<<<<<<<<<<<< - * self._max_field_size = max_field_size - * self._response_with_body = response_with_body - */ - __pyx_v_self->_max_headers = __pyx_v_max_headers; - - /* "aiohttp/_http_parser.pyx":356 - * self._max_line_size = max_line_size - * self._max_headers = max_headers - * self._max_field_size = max_field_size # <<<<<<<<<<<<<< - * self._response_with_body = response_with_body - * self._read_until_eof = 
read_until_eof - */ - __pyx_v_self->_max_field_size = __pyx_v_max_field_size; - - /* "aiohttp/_http_parser.pyx":357 - * self._max_headers = max_headers - * self._max_field_size = max_field_size - * self._response_with_body = response_with_body # <<<<<<<<<<<<<< - * self._read_until_eof = read_until_eof - * self._upgraded = False - */ - __pyx_v_self->_response_with_body = __pyx_v_response_with_body; - - /* "aiohttp/_http_parser.pyx":358 - * self._max_field_size = max_field_size - * self._response_with_body = response_with_body - * self._read_until_eof = read_until_eof # <<<<<<<<<<<<<< - * self._upgraded = False - * self._auto_decompress = auto_decompress - */ - __pyx_v_self->_read_until_eof = __pyx_v_read_until_eof; - - /* "aiohttp/_http_parser.pyx":359 - * self._response_with_body = response_with_body - * self._read_until_eof = read_until_eof - * self._upgraded = False # <<<<<<<<<<<<<< - * self._auto_decompress = auto_decompress - * self._content_encoding = None - */ - __pyx_v_self->_upgraded = 0; - - /* "aiohttp/_http_parser.pyx":360 - * self._read_until_eof = read_until_eof - * self._upgraded = False - * self._auto_decompress = auto_decompress # <<<<<<<<<<<<<< - * self._content_encoding = None - * - */ - __pyx_v_self->_auto_decompress = __pyx_v_auto_decompress; - - /* "aiohttp/_http_parser.pyx":361 - * self._upgraded = False - * self._auto_decompress = auto_decompress - * self._content_encoding = None # <<<<<<<<<<<<<< - * - * self._csettings.on_url = cb_on_url - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_content_encoding); - __Pyx_DECREF(__pyx_v_self->_content_encoding); - __pyx_v_self->_content_encoding = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":363 - * self._content_encoding = None - * - * self._csettings.on_url = cb_on_url # <<<<<<<<<<<<<< - * self._csettings.on_status = cb_on_status - * self._csettings.on_header_field = cb_on_header_field - */ - __pyx_v_self->_csettings->on_url = __pyx_f_7aiohttp_12_http_parser_cb_on_url; - - /* "aiohttp/_http_parser.pyx":364 - * - * self._csettings.on_url = cb_on_url - * self._csettings.on_status = cb_on_status # <<<<<<<<<<<<<< - * self._csettings.on_header_field = cb_on_header_field - * self._csettings.on_header_value = cb_on_header_value - */ - __pyx_v_self->_csettings->on_status = __pyx_f_7aiohttp_12_http_parser_cb_on_status; - - /* "aiohttp/_http_parser.pyx":365 - * self._csettings.on_url = cb_on_url - * self._csettings.on_status = cb_on_status - * self._csettings.on_header_field = cb_on_header_field # <<<<<<<<<<<<<< - * self._csettings.on_header_value = cb_on_header_value - * self._csettings.on_headers_complete = cb_on_headers_complete - */ - __pyx_v_self->_csettings->on_header_field = __pyx_f_7aiohttp_12_http_parser_cb_on_header_field; - - /* "aiohttp/_http_parser.pyx":366 - * self._csettings.on_status = cb_on_status - * self._csettings.on_header_field = cb_on_header_field - * self._csettings.on_header_value = cb_on_header_value # <<<<<<<<<<<<<< - * self._csettings.on_headers_complete = cb_on_headers_complete - * self._csettings.on_body = cb_on_body - */ - __pyx_v_self->_csettings->on_header_value = __pyx_f_7aiohttp_12_http_parser_cb_on_header_value; - - /* "aiohttp/_http_parser.pyx":367 - * self._csettings.on_header_field = cb_on_header_field - * self._csettings.on_header_value = cb_on_header_value - * self._csettings.on_headers_complete = cb_on_headers_complete # <<<<<<<<<<<<<< - * self._csettings.on_body = cb_on_body - * self._csettings.on_message_begin = cb_on_message_begin - */ - 
__pyx_v_self->_csettings->on_headers_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete; - - /* "aiohttp/_http_parser.pyx":368 - * self._csettings.on_header_value = cb_on_header_value - * self._csettings.on_headers_complete = cb_on_headers_complete - * self._csettings.on_body = cb_on_body # <<<<<<<<<<<<<< - * self._csettings.on_message_begin = cb_on_message_begin - * self._csettings.on_message_complete = cb_on_message_complete - */ - __pyx_v_self->_csettings->on_body = __pyx_f_7aiohttp_12_http_parser_cb_on_body; - - /* "aiohttp/_http_parser.pyx":369 - * self._csettings.on_headers_complete = cb_on_headers_complete - * self._csettings.on_body = cb_on_body - * self._csettings.on_message_begin = cb_on_message_begin # <<<<<<<<<<<<<< - * self._csettings.on_message_complete = cb_on_message_complete - * self._csettings.on_chunk_header = cb_on_chunk_header - */ - __pyx_v_self->_csettings->on_message_begin = __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin; - - /* "aiohttp/_http_parser.pyx":370 - * self._csettings.on_body = cb_on_body - * self._csettings.on_message_begin = cb_on_message_begin - * self._csettings.on_message_complete = cb_on_message_complete # <<<<<<<<<<<<<< - * self._csettings.on_chunk_header = cb_on_chunk_header - * self._csettings.on_chunk_complete = cb_on_chunk_complete - */ - __pyx_v_self->_csettings->on_message_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete; - - /* "aiohttp/_http_parser.pyx":371 - * self._csettings.on_message_begin = cb_on_message_begin - * self._csettings.on_message_complete = cb_on_message_complete - * self._csettings.on_chunk_header = cb_on_chunk_header # <<<<<<<<<<<<<< - * self._csettings.on_chunk_complete = cb_on_chunk_complete - * - */ - __pyx_v_self->_csettings->on_chunk_header = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header; - - /* "aiohttp/_http_parser.pyx":372 - * self._csettings.on_message_complete = cb_on_message_complete - * self._csettings.on_chunk_header = cb_on_chunk_header - * self._csettings.on_chunk_complete = cb_on_chunk_complete # <<<<<<<<<<<<<< - * - * self._last_error = None - */ - __pyx_v_self->_csettings->on_chunk_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete; - - /* "aiohttp/_http_parser.pyx":374 - * self._csettings.on_chunk_complete = cb_on_chunk_complete - * - * self._last_error = None # <<<<<<<<<<<<<< - * self._limit = limit - * - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_last_error); - __Pyx_DECREF(__pyx_v_self->_last_error); - __pyx_v_self->_last_error = Py_None; - - /* "aiohttp/_http_parser.pyx":375 - * - * self._last_error = None - * self._limit = limit # <<<<<<<<<<<<<< - * - * cdef _process_header(self): - */ - __pyx_v_self->_limit = __pyx_v_limit; - - /* "aiohttp/_http_parser.pyx":327 - * PyMem_Free(self._csettings) - * - * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< - * object protocol, object loop, int limit, - * object timer=None, - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._init", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":377 - * self._limit = limit - * - * cdef _process_header(self): # <<<<<<<<<<<<<< - * if self._raw_name: - * raw_name = bytes(self._raw_name) - */ - -static PyObject 
*__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_v_raw_name = NULL; - PyObject *__pyx_v_raw_value = NULL; - PyObject *__pyx_v_name = NULL; - PyObject *__pyx_v_value = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_t_7; - int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_process_header", 0); - - /* "aiohttp/_http_parser.pyx":378 - * - * cdef _process_header(self): - * if self._raw_name: # <<<<<<<<<<<<<< - * raw_name = bytes(self._raw_name) - * raw_value = bytes(self._raw_value) - */ - __pyx_t_1 = (__pyx_v_self->_raw_name != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->_raw_name) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":379 - * cdef _process_header(self): - * if self._raw_name: - * raw_name = bytes(self._raw_name) # <<<<<<<<<<<<<< - * raw_value = bytes(self._raw_value) - * - */ - __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 379, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v_raw_name = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":380 - * if self._raw_name: - * raw_name = bytes(self._raw_name) - * raw_value = bytes(self._raw_value) # <<<<<<<<<<<<<< - * - * name = find_header(raw_name) - */ - __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_self->_raw_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 380, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v_raw_value = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":382 - * raw_value = bytes(self._raw_value) - * - * name = find_header(raw_name) # <<<<<<<<<<<<<< - * value = raw_value.decode('utf-8', 'surrogateescape') - * - */ - __pyx_t_2 = __pyx_f_7aiohttp_12_http_parser_find_header(__pyx_v_raw_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 382, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v_name = __pyx_t_2; - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":383 - * - * name = find_header(raw_name) - * value = raw_value.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * - * self._headers.add(name, value) - */ - __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_raw_value, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 383, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":385 - * value = raw_value.decode('utf-8', 'surrogateescape') - * - * self._headers.add(name, value) # <<<<<<<<<<<<<< - * - * if name is CONTENT_ENCODING: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_headers, __pyx_n_s_add); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 385, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_3)) { - PyObject *__pyx_temp[3] = {__pyx_t_4, 
__pyx_v_name, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 385, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GOTREF(__pyx_t_2); - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { - PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_name, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 385, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GOTREF(__pyx_t_2); - } else - #endif - { - __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 385, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - if (__pyx_t_4) { - __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; - } - __Pyx_INCREF(__pyx_v_name); - __Pyx_GIVEREF(__pyx_v_name); - PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_name); - __Pyx_INCREF(__pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_value); - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 385, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":387 - * self._headers.add(name, value) - * - * if name is CONTENT_ENCODING: # <<<<<<<<<<<<<< - * self._content_encoding = value - * - */ - __pyx_t_1 = (__pyx_v_name == __pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING); - __pyx_t_7 = (__pyx_t_1 != 0); - if (__pyx_t_7) { - - /* "aiohttp/_http_parser.pyx":388 - * - * if name is CONTENT_ENCODING: - * self._content_encoding = value # <<<<<<<<<<<<<< - * - * PyByteArray_Resize(self._raw_name, 0) - */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 388, __pyx_L1_error) - __pyx_t_2 = __pyx_v_value; - __Pyx_INCREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_content_encoding); - __Pyx_DECREF(__pyx_v_self->_content_encoding); - __pyx_v_self->_content_encoding = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":387 - * self._headers.add(name, value) - * - * if name is CONTENT_ENCODING: # <<<<<<<<<<<<<< - * self._content_encoding = value - * - */ - } - - /* "aiohttp/_http_parser.pyx":390 - * self._content_encoding = value - * - * PyByteArray_Resize(self._raw_name, 0) # <<<<<<<<<<<<<< - * PyByteArray_Resize(self._raw_value, 0) - * self._has_value = False - */ - __pyx_t_2 = __pyx_v_self->_raw_name; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 390, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":391 - * - * PyByteArray_Resize(self._raw_name, 0) - * PyByteArray_Resize(self._raw_value, 0) # <<<<<<<<<<<<<< - * self._has_value = False - * self._raw_headers.append((raw_name, raw_value)) - */ - __pyx_t_2 = __pyx_v_self->_raw_value; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_5 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 391, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":392 - * PyByteArray_Resize(self._raw_name, 0) - * 
PyByteArray_Resize(self._raw_value, 0) - * self._has_value = False # <<<<<<<<<<<<<< - * self._raw_headers.append((raw_name, raw_value)) - * - */ - __pyx_v_self->_has_value = 0; - - /* "aiohttp/_http_parser.pyx":393 - * PyByteArray_Resize(self._raw_value, 0) - * self._has_value = False - * self._raw_headers.append((raw_name, raw_value)) # <<<<<<<<<<<<<< - * - * cdef _on_header_field(self, char* at, size_t length): - */ - if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 393, __pyx_L1_error) - } - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 393, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v_raw_name); - __Pyx_GIVEREF(__pyx_v_raw_name); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_raw_name); - __Pyx_INCREF(__pyx_v_raw_value); - __Pyx_GIVEREF(__pyx_v_raw_value); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_raw_value); - __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_self->_raw_headers, __pyx_t_2); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 393, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":378 - * - * cdef _process_header(self): - * if self._raw_name: # <<<<<<<<<<<<<< - * raw_name = bytes(self._raw_name) - * raw_value = bytes(self._raw_value) - */ - } - - /* "aiohttp/_http_parser.pyx":377 - * self._limit = limit - * - * cdef _process_header(self): # <<<<<<<<<<<<<< - * if self._raw_name: - * raw_name = bytes(self._raw_name) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._process_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_raw_name); - __Pyx_XDECREF(__pyx_v_raw_value); - __Pyx_XDECREF(__pyx_v_name); - __Pyx_XDECREF(__pyx_v_value); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":395 - * self._raw_headers.append((raw_name, raw_value)) - * - * cdef _on_header_field(self, char* at, size_t length): # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * cdef char *buf - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length) { - Py_ssize_t __pyx_v_size; - char *__pyx_v_buf; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_header_field", 0); - - /* "aiohttp/_http_parser.pyx":398 - * cdef Py_ssize_t size - * cdef char *buf - * if self._has_value: # <<<<<<<<<<<<<< - * self._process_header() - * - */ - __pyx_t_1 = (__pyx_v_self->_has_value != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":399 - * cdef char *buf - * if self._has_value: - * self._process_header() # <<<<<<<<<<<<<< - * - * size = PyByteArray_Size(self._raw_name) - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 399, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* 
"aiohttp/_http_parser.pyx":398 - * cdef Py_ssize_t size - * cdef char *buf - * if self._has_value: # <<<<<<<<<<<<<< - * self._process_header() - * - */ - } - - /* "aiohttp/_http_parser.pyx":401 - * self._process_header() - * - * size = PyByteArray_Size(self._raw_name) # <<<<<<<<<<<<<< - * PyByteArray_Resize(self._raw_name, size + length) - * buf = PyByteArray_AsString(self._raw_name) - */ - __pyx_t_2 = __pyx_v_self->_raw_name; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_3 = PyByteArray_Size(__pyx_t_2); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1L))) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v_size = __pyx_t_3; - - /* "aiohttp/_http_parser.pyx":402 - * - * size = PyByteArray_Size(self._raw_name) - * PyByteArray_Resize(self._raw_name, size + length) # <<<<<<<<<<<<<< - * buf = PyByteArray_AsString(self._raw_name) - * memcpy(buf + size, at, length) - */ - __pyx_t_2 = __pyx_v_self->_raw_name; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_4 = PyByteArray_Resize(__pyx_t_2, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 402, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":403 - * size = PyByteArray_Size(self._raw_name) - * PyByteArray_Resize(self._raw_name, size + length) - * buf = PyByteArray_AsString(self._raw_name) # <<<<<<<<<<<<<< - * memcpy(buf + size, at, length) - * - */ - __pyx_t_2 = __pyx_v_self->_raw_name; - __Pyx_INCREF(__pyx_t_2); - __pyx_v_buf = PyByteArray_AsString(__pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":404 - * PyByteArray_Resize(self._raw_name, size + length) - * buf = PyByteArray_AsString(self._raw_name) - * memcpy(buf + size, at, length) # <<<<<<<<<<<<<< - * - * cdef _on_header_value(self, char* at, size_t length): - */ - (void)(memcpy((__pyx_v_buf + __pyx_v_size), __pyx_v_at, __pyx_v_length)); - - /* "aiohttp/_http_parser.pyx":395 - * self._raw_headers.append((raw_name, raw_value)) - * - * cdef _on_header_field(self, char* at, size_t length): # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * cdef char *buf - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":406 - * memcpy(buf + size, at, length) - * - * cdef _on_header_value(self, char* at, size_t length): # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * cdef char *buf - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, char *__pyx_v_at, size_t __pyx_v_length) { - Py_ssize_t __pyx_v_size; - char *__pyx_v_buf; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_header_value", 0); - - /* "aiohttp/_http_parser.pyx":410 - * cdef char *buf - * - * size = PyByteArray_Size(self._raw_value) # <<<<<<<<<<<<<< - * PyByteArray_Resize(self._raw_value, size + length) - * buf = PyByteArray_AsString(self._raw_value) - */ - __pyx_t_1 = __pyx_v_self->_raw_value; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = PyByteArray_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == 
((Py_ssize_t)-1L))) __PYX_ERR(0, 410, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_size = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":411 - * - * size = PyByteArray_Size(self._raw_value) - * PyByteArray_Resize(self._raw_value, size + length) # <<<<<<<<<<<<<< - * buf = PyByteArray_AsString(self._raw_value) - * memcpy(buf + size, at, length) - */ - __pyx_t_1 = __pyx_v_self->_raw_value; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = PyByteArray_Resize(__pyx_t_1, (__pyx_v_size + __pyx_v_length)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 411, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":412 - * size = PyByteArray_Size(self._raw_value) - * PyByteArray_Resize(self._raw_value, size + length) - * buf = PyByteArray_AsString(self._raw_value) # <<<<<<<<<<<<<< - * memcpy(buf + size, at, length) - * self._has_value = True - */ - __pyx_t_1 = __pyx_v_self->_raw_value; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_buf = PyByteArray_AsString(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":413 - * PyByteArray_Resize(self._raw_value, size + length) - * buf = PyByteArray_AsString(self._raw_value) - * memcpy(buf + size, at, length) # <<<<<<<<<<<<<< - * self._has_value = True - * - */ - (void)(memcpy((__pyx_v_buf + __pyx_v_size), __pyx_v_at, __pyx_v_length)); - - /* "aiohttp/_http_parser.pyx":414 - * buf = PyByteArray_AsString(self._raw_value) - * memcpy(buf + size, at, length) - * self._has_value = True # <<<<<<<<<<<<<< - * - * cdef _on_headers_complete(self): - */ - __pyx_v_self->_has_value = 1; - - /* "aiohttp/_http_parser.pyx":406 - * memcpy(buf + size, at, length) - * - * cdef _on_header_value(self, char* at, size_t length): # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * cdef char *buf - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":416 - * self._has_value = True - * - * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< - * self._process_header() - * - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_v_method = NULL; - int __pyx_v_should_close; - unsigned int __pyx_v_upgrade; - unsigned int __pyx_v_chunked; - PyObject *__pyx_v_raw_headers = NULL; - PyObject *__pyx_v_headers = NULL; - PyObject *__pyx_v_encoding = NULL; - PyObject *__pyx_v_enc = NULL; - PyObject *__pyx_v_msg = NULL; - PyObject *__pyx_v_payload = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - unsigned int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - int __pyx_t_9; - int __pyx_t_10; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_headers_complete", 0); - - /* "aiohttp/_http_parser.pyx":417 - * - * cdef _on_headers_complete(self): - * self._process_header() # <<<<<<<<<<<<<< - * - * method = http_method_str(self._cparser.method) - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser 
*)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 417, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":419 - * self._process_header() - * - * method = http_method_str(self._cparser.method) # <<<<<<<<<<<<<< - * should_close = not cparser.http_should_keep_alive(self._cparser) - * upgrade = self._cparser.upgrade - */ - __pyx_t_1 = __pyx_f_7aiohttp_12_http_parser_http_method_str(__pyx_v_self->_cparser->method); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_method = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":420 - * - * method = http_method_str(self._cparser.method) - * should_close = not cparser.http_should_keep_alive(self._cparser) # <<<<<<<<<<<<<< - * upgrade = self._cparser.upgrade - * chunked = self._cparser.flags & cparser.F_CHUNKED - */ - __pyx_v_should_close = (!(http_should_keep_alive(__pyx_v_self->_cparser) != 0)); - - /* "aiohttp/_http_parser.pyx":421 - * method = http_method_str(self._cparser.method) - * should_close = not cparser.http_should_keep_alive(self._cparser) - * upgrade = self._cparser.upgrade # <<<<<<<<<<<<<< - * chunked = self._cparser.flags & cparser.F_CHUNKED - * - */ - __pyx_t_2 = __pyx_v_self->_cparser->upgrade; - __pyx_v_upgrade = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":422 - * should_close = not cparser.http_should_keep_alive(self._cparser) - * upgrade = self._cparser.upgrade - * chunked = self._cparser.flags & cparser.F_CHUNKED # <<<<<<<<<<<<<< - * - * raw_headers = tuple(self._raw_headers) - */ - __pyx_v_chunked = (__pyx_v_self->_cparser->flags & F_CHUNKED); - - /* "aiohttp/_http_parser.pyx":424 - * chunked = self._cparser.flags & cparser.F_CHUNKED - * - * raw_headers = tuple(self._raw_headers) # <<<<<<<<<<<<<< - * headers = CIMultiDictProxy(self._headers) - * - */ - if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 424, __pyx_L1_error) - } - __pyx_t_1 = PyList_AsTuple(__pyx_v_self->_raw_headers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 424, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_raw_headers = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":425 - * - * raw_headers = tuple(self._raw_headers) - * headers = CIMultiDictProxy(self._headers) # <<<<<<<<<<<<<< - * - * if upgrade or self._cparser.method == 5: # cparser.CONNECT: - */ - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy); - __pyx_t_3 = __pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_self->_headers) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_self->_headers); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 425, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_headers = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":427 - * headers = CIMultiDictProxy(self._headers) - * - * if upgrade or self._cparser.method == 5: # cparser.CONNECT: # <<<<<<<<<<<<<< - * self._upgraded = True - * - */ - __pyx_t_6 = (__pyx_v_upgrade != 0); - if (!__pyx_t_6) { - } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_6 = ((__pyx_v_self->_cparser->method == 5) != 0); - __pyx_t_5 = __pyx_t_6; - __pyx_L4_bool_binop_done:; - if (__pyx_t_5) { - - /* "aiohttp/_http_parser.pyx":428 - * - * if upgrade or self._cparser.method == 5: # cparser.CONNECT: - * self._upgraded = True # <<<<<<<<<<<<<< - * - * # do not support old websocket spec - */ - __pyx_v_self->_upgraded = 1; - - /* "aiohttp/_http_parser.pyx":427 - * headers = CIMultiDictProxy(self._headers) - * - * if upgrade or self._cparser.method == 5: # cparser.CONNECT: # <<<<<<<<<<<<<< - * self._upgraded = True - * - */ - } - - /* "aiohttp/_http_parser.pyx":431 - * - * # do not support old websocket spec - * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< - * raise InvalidHeader(SEC_WEBSOCKET_KEY1) - * - */ - __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_v_headers, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 431, __pyx_L1_error) - __pyx_t_6 = (__pyx_t_5 != 0); - if (unlikely(__pyx_t_6)) { - - /* "aiohttp/_http_parser.pyx":432 - * # do not support old websocket spec - * if SEC_WEBSOCKET_KEY1 in headers: - * raise InvalidHeader(SEC_WEBSOCKET_KEY1) # <<<<<<<<<<<<<< - * - * encoding = None - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_InvalidHeader); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 432, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 432, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 432, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":431 - * - * # do not support old websocket spec - * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< - * raise InvalidHeader(SEC_WEBSOCKET_KEY1) - * - */ - } - - /* "aiohttp/_http_parser.pyx":434 - * raise InvalidHeader(SEC_WEBSOCKET_KEY1) - * - * encoding = None # <<<<<<<<<<<<<< - * enc = self._content_encoding - * if enc is not None: - */ - __Pyx_INCREF(Py_None); - __pyx_v_encoding = Py_None; - - /* "aiohttp/_http_parser.pyx":435 - * - * encoding = None - * enc = self._content_encoding # <<<<<<<<<<<<<< - * if enc is not None: - * self._content_encoding = None - */ - __pyx_t_1 = __pyx_v_self->_content_encoding; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_enc = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":436 - * encoding = None - * enc = self._content_encoding - * if enc is not None: # <<<<<<<<<<<<<< - * self._content_encoding = None - * enc = enc.lower() - */ - __pyx_t_6 = (__pyx_v_enc != Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { - - /* "aiohttp/_http_parser.pyx":437 - * enc = self._content_encoding - * if enc is not None: - * self._content_encoding = None # <<<<<<<<<<<<<< - * enc = enc.lower() - * if enc in ('gzip', 'deflate', 'br'): - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_content_encoding); - __Pyx_DECREF(__pyx_v_self->_content_encoding); - __pyx_v_self->_content_encoding = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":438 - * if enc is not None: - * self._content_encoding = None - * enc = enc.lower() # <<<<<<<<<<<<<< - * if enc in ('gzip', 'deflate', 'br'): - * encoding = enc - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_enc, __pyx_n_s_lower); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 438, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_enc, __pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":439 - * self._content_encoding = None - * enc = enc.lower() - * if enc in ('gzip', 'deflate', 'br'): # <<<<<<<<<<<<<< - * encoding = enc - * - */ - __Pyx_INCREF(__pyx_v_enc); - __pyx_t_1 = __pyx_v_enc; - __pyx_t_6 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_gzip, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 439, __pyx_L1_error) - if (!__pyx_t_6) { - } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L9_bool_binop_done; - } - __pyx_t_6 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_deflate, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 439, __pyx_L1_error) - if (!__pyx_t_6) { - } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L9_bool_binop_done; - } - __pyx_t_6 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_n_u_br, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 439, __pyx_L1_error) - __pyx_t_5 = __pyx_t_6; - __pyx_L9_bool_binop_done:; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { - - /* "aiohttp/_http_parser.pyx":440 - * enc = enc.lower() - * if enc in ('gzip', 'deflate', 'br'): - * encoding = enc # <<<<<<<<<<<<<< - * - * if self._cparser.type == cparser.HTTP_REQUEST: - */ - __Pyx_INCREF(__pyx_v_enc); - __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_v_enc); - - /* "aiohttp/_http_parser.pyx":439 - * self._content_encoding = None - * enc = enc.lower() - * if enc in ('gzip', 'deflate', 'br'): # <<<<<<<<<<<<<< - * encoding = enc - * - */ - } - - /* "aiohttp/_http_parser.pyx":436 - * encoding = None - * enc = self._content_encoding - * if enc is not None: # <<<<<<<<<<<<<< - * self._content_encoding = None - * enc = enc.lower() - */ - } - - /* "aiohttp/_http_parser.pyx":442 - * encoding = enc - * - * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< - * msg = _new_request_message( - * method, self._path, - */ - __pyx_t_6 = ((__pyx_v_self->_cparser->type == HTTP_REQUEST) != 0); - if (__pyx_t_6) { - - /* "aiohttp/_http_parser.pyx":444 - * if self._cparser.type == cparser.HTTP_REQUEST: - * msg = _new_request_message( - * method, self._path, # <<<<<<<<<<<<<< - * self.http_version(), headers, raw_headers, - * should_close, encoding, upgrade, chunked, self._url) - */ - __pyx_t_1 = __pyx_v_self->_path; - __Pyx_INCREF(__pyx_t_1); - - /* "aiohttp/_http_parser.pyx":445 - * msg = _new_request_message( - * method, self._path, - * self.http_version(), headers, raw_headers, # <<<<<<<<<<<<<< - * should_close, encoding, upgrade, chunked, self._url) - * else: - */ - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 445, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - - /* "aiohttp/_http_parser.pyx":446 - * method, self._path, - * self.http_version(), headers, raw_headers, - * should_close, encoding, upgrade, chunked, self._url) # <<<<<<<<<<<<<< - * else: - * msg = _new_response_message( - */ - __pyx_t_4 = __pyx_v_self->_url; - __Pyx_INCREF(__pyx_t_4); - - /* "aiohttp/_http_parser.pyx":443 - * - * if self._cparser.type == cparser.HTTP_REQUEST: - * msg = _new_request_message( # <<<<<<<<<<<<<< - * method, self._path, - * self.http_version(), headers, raw_headers, - */ - __pyx_t_7 = 
__pyx_f_7aiohttp_12_http_parser__new_request_message(__pyx_v_method, ((PyObject*)__pyx_t_1), __pyx_t_3, __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 443, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_v_msg = __pyx_t_7; - __pyx_t_7 = 0; - - /* "aiohttp/_http_parser.pyx":442 - * encoding = enc - * - * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< - * msg = _new_request_message( - * method, self._path, - */ - goto __pyx_L12; - } - - /* "aiohttp/_http_parser.pyx":448 - * should_close, encoding, upgrade, chunked, self._url) - * else: - * msg = _new_response_message( # <<<<<<<<<<<<<< - * self.http_version(), self._cparser.status_code, self._reason, - * headers, raw_headers, should_close, encoding, - */ - /*else*/ { - - /* "aiohttp/_http_parser.pyx":449 - * else: - * msg = _new_response_message( - * self.http_version(), self._cparser.status_code, self._reason, # <<<<<<<<<<<<<< - * headers, raw_headers, should_close, encoding, - * upgrade, chunked) - */ - __pyx_t_7 = __pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(__pyx_v_self); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_4 = __pyx_v_self->_reason; - __Pyx_INCREF(__pyx_t_4); - - /* "aiohttp/_http_parser.pyx":448 - * should_close, encoding, upgrade, chunked, self._url) - * else: - * msg = _new_response_message( # <<<<<<<<<<<<<< - * self.http_version(), self._cparser.status_code, self._reason, - * headers, raw_headers, should_close, encoding, - */ - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser__new_response_message(__pyx_t_7, __pyx_v_self->_cparser->status_code, ((PyObject*)__pyx_t_4), __pyx_v_headers, __pyx_v_raw_headers, __pyx_v_should_close, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 448, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_v_msg = __pyx_t_3; - __pyx_t_3 = 0; - } - __pyx_L12:; - - /* "aiohttp/_http_parser.pyx":453 - * upgrade, chunked) - * - * if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< - * self._cparser.method == 5 or # CONNECT: 5 - * (self._cparser.status_code >= 199 and - */ - __pyx_t_5 = (ULLONG_MAX > __pyx_v_self->_cparser->content_length); - if (__pyx_t_5) { - __pyx_t_5 = (__pyx_v_self->_cparser->content_length > 0); - } - __pyx_t_8 = (__pyx_t_5 != 0); - if (!__pyx_t_8) { - } else { - __pyx_t_6 = __pyx_t_8; - goto __pyx_L14_bool_binop_done; - } - __pyx_t_8 = (__pyx_v_chunked != 0); - if (!__pyx_t_8) { - } else { - __pyx_t_6 = __pyx_t_8; - goto __pyx_L14_bool_binop_done; - } - - /* "aiohttp/_http_parser.pyx":454 - * - * if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or - * self._cparser.method == 5 or # CONNECT: 5 # <<<<<<<<<<<<<< - * (self._cparser.status_code >= 199 and - * self._cparser.content_length == ULLONG_MAX and - */ - __pyx_t_8 = ((__pyx_v_self->_cparser->method == 5) != 0); - if (!__pyx_t_8) { - } else { - __pyx_t_6 = __pyx_t_8; - goto __pyx_L14_bool_binop_done; - } - - /* "aiohttp/_http_parser.pyx":455 - * if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or - * self._cparser.method == 5 or # CONNECT: 5 - * (self._cparser.status_code >= 199 and # <<<<<<<<<<<<<< - * self._cparser.content_length == 
ULLONG_MAX and - * self._read_until_eof) - */ - __pyx_t_8 = ((__pyx_v_self->_cparser->status_code >= 0xC7) != 0); - if (__pyx_t_8) { - } else { - __pyx_t_6 = __pyx_t_8; - goto __pyx_L14_bool_binop_done; - } - - /* "aiohttp/_http_parser.pyx":456 - * self._cparser.method == 5 or # CONNECT: 5 - * (self._cparser.status_code >= 199 and - * self._cparser.content_length == ULLONG_MAX and # <<<<<<<<<<<<<< - * self._read_until_eof) - * ): - */ - __pyx_t_8 = ((__pyx_v_self->_cparser->content_length == ULLONG_MAX) != 0); - if (__pyx_t_8) { - } else { - __pyx_t_6 = __pyx_t_8; - goto __pyx_L14_bool_binop_done; - } - - /* "aiohttp/_http_parser.pyx":457 - * (self._cparser.status_code >= 199 and - * self._cparser.content_length == ULLONG_MAX and - * self._read_until_eof) # <<<<<<<<<<<<<< - * ): - * payload = StreamReader( - */ - __pyx_t_8 = (__pyx_v_self->_read_until_eof != 0); - __pyx_t_6 = __pyx_t_8; - __pyx_L14_bool_binop_done:; - - /* "aiohttp/_http_parser.pyx":453 - * upgrade, chunked) - * - * if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< - * self._cparser.method == 5 or # CONNECT: 5 - * (self._cparser.status_code >= 199 and - */ - if (__pyx_t_6) { - - /* "aiohttp/_http_parser.pyx":459 - * self._read_until_eof) - * ): - * payload = StreamReader( # <<<<<<<<<<<<<< - * self._protocol, timer=self._timer, loop=self._loop, - * limit=self._limit) - */ - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 459, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_v_self->_protocol); - __Pyx_GIVEREF(__pyx_v_self->_protocol); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->_protocol); - - /* "aiohttp/_http_parser.pyx":460 - * ): - * payload = StreamReader( - * self._protocol, timer=self._timer, loop=self._loop, # <<<<<<<<<<<<<< - * limit=self._limit) - * else: - */ - __pyx_t_4 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_timer, __pyx_v_self->_timer) < 0) __PYX_ERR(0, 460, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_loop, __pyx_v_self->_loop) < 0) __PYX_ERR(0, 460, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":461 - * payload = StreamReader( - * self._protocol, timer=self._timer, loop=self._loop, - * limit=self._limit) # <<<<<<<<<<<<<< - * else: - * payload = EMPTY_PAYLOAD - */ - __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_self->_limit); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 461, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_limit, __pyx_t_7) < 0) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - - /* "aiohttp/_http_parser.pyx":459 - * self._read_until_eof) - * ): - * payload = StreamReader( # <<<<<<<<<<<<<< - * self._protocol, timer=self._timer, loop=self._loop, - * limit=self._limit) - */ - __pyx_t_7 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 459, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_v_payload = __pyx_t_7; - __pyx_t_7 = 0; - - /* "aiohttp/_http_parser.pyx":453 - * upgrade, chunked) - * - * if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< - * self._cparser.method == 5 or # CONNECT: 5 - * (self._cparser.status_code >= 199 and - */ - goto __pyx_L13; - } - - /* "aiohttp/_http_parser.pyx":463 - * limit=self._limit) - * else: - * payload = 
EMPTY_PAYLOAD # <<<<<<<<<<<<<< - * - * self._payload = payload - */ - /*else*/ { - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); - __pyx_v_payload = __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD; - } - __pyx_L13:; - - /* "aiohttp/_http_parser.pyx":465 - * payload = EMPTY_PAYLOAD - * - * self._payload = payload # <<<<<<<<<<<<<< - * if encoding is not None and self._auto_decompress: - * self._payload = DeflateBuffer(payload, encoding) - */ - __Pyx_INCREF(__pyx_v_payload); - __Pyx_GIVEREF(__pyx_v_payload); - __Pyx_GOTREF(__pyx_v_self->_payload); - __Pyx_DECREF(__pyx_v_self->_payload); - __pyx_v_self->_payload = __pyx_v_payload; - - /* "aiohttp/_http_parser.pyx":466 - * - * self._payload = payload - * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< - * self._payload = DeflateBuffer(payload, encoding) - * - */ - __pyx_t_8 = (__pyx_v_encoding != Py_None); - __pyx_t_5 = (__pyx_t_8 != 0); - if (__pyx_t_5) { - } else { - __pyx_t_6 = __pyx_t_5; - goto __pyx_L21_bool_binop_done; - } - __pyx_t_5 = (__pyx_v_self->_auto_decompress != 0); - __pyx_t_6 = __pyx_t_5; - __pyx_L21_bool_binop_done:; - if (__pyx_t_6) { - - /* "aiohttp/_http_parser.pyx":467 - * self._payload = payload - * if encoding is not None and self._auto_decompress: - * self._payload = DeflateBuffer(payload, encoding) # <<<<<<<<<<<<<< - * - * if not self._response_with_body: - */ - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer); - __pyx_t_4 = __pyx_v_7aiohttp_12_http_parser_DeflateBuffer; __pyx_t_3 = NULL; - __pyx_t_9 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_9 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_4)) { - PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; - __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_GOTREF(__pyx_t_7); - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { - PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; - __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_GOTREF(__pyx_t_7); - } else - #endif - { - __pyx_t_1 = PyTuple_New(2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (__pyx_t_3) { - __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); __pyx_t_3 = NULL; - } - __Pyx_INCREF(__pyx_v_payload); - __Pyx_GIVEREF(__pyx_v_payload); - PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_9, __pyx_v_payload); - __Pyx_INCREF(__pyx_v_encoding); - __Pyx_GIVEREF(__pyx_v_encoding); - PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_9, __pyx_v_encoding); - __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - } - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GIVEREF(__pyx_t_7); - __Pyx_GOTREF(__pyx_v_self->_payload); - __Pyx_DECREF(__pyx_v_self->_payload); - __pyx_v_self->_payload = __pyx_t_7; - __pyx_t_7 = 0; - - /* 
"aiohttp/_http_parser.pyx":466 - * - * self._payload = payload - * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< - * self._payload = DeflateBuffer(payload, encoding) - * - */ - } - - /* "aiohttp/_http_parser.pyx":469 - * self._payload = DeflateBuffer(payload, encoding) - * - * if not self._response_with_body: # <<<<<<<<<<<<<< - * payload = EMPTY_PAYLOAD - * - */ - __pyx_t_6 = ((!(__pyx_v_self->_response_with_body != 0)) != 0); - if (__pyx_t_6) { - - /* "aiohttp/_http_parser.pyx":470 - * - * if not self._response_with_body: - * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< - * - * self._messages.append((msg, payload)) - */ - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); - __Pyx_DECREF_SET(__pyx_v_payload, __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); - - /* "aiohttp/_http_parser.pyx":469 - * self._payload = DeflateBuffer(payload, encoding) - * - * if not self._response_with_body: # <<<<<<<<<<<<<< - * payload = EMPTY_PAYLOAD - * - */ - } - - /* "aiohttp/_http_parser.pyx":472 - * payload = EMPTY_PAYLOAD - * - * self._messages.append((msg, payload)) # <<<<<<<<<<<<<< - * - * cdef _on_message_complete(self): - */ - if (unlikely(__pyx_v_self->_messages == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 472, __pyx_L1_error) - } - __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 472, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_INCREF(__pyx_v_msg); - __Pyx_GIVEREF(__pyx_v_msg); - PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_msg); - __Pyx_INCREF(__pyx_v_payload); - __Pyx_GIVEREF(__pyx_v_payload); - PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_v_payload); - __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_self->_messages, __pyx_t_7); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 472, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - - /* "aiohttp/_http_parser.pyx":416 - * self._has_value = True - * - * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< - * self._process_header() - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_method); - __Pyx_XDECREF(__pyx_v_raw_headers); - __Pyx_XDECREF(__pyx_v_headers); - __Pyx_XDECREF(__pyx_v_encoding); - __Pyx_XDECREF(__pyx_v_enc); - __Pyx_XDECREF(__pyx_v_msg); - __Pyx_XDECREF(__pyx_v_payload); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":474 - * self._messages.append((msg, payload)) - * - * cdef _on_message_complete(self): # <<<<<<<<<<<<<< - * self._payload.feed_eof() - * self._payload = None - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_message_complete", 0); - - /* "aiohttp/_http_parser.pyx":475 - * - * cdef _on_message_complete(self): - * self._payload.feed_eof() # <<<<<<<<<<<<<< - * self._payload = None - * - */ - __pyx_t_2 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":476 - * cdef _on_message_complete(self): - * self._payload.feed_eof() - * self._payload = None # <<<<<<<<<<<<<< - * - * cdef _on_chunk_header(self): - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_payload); - __Pyx_DECREF(__pyx_v_self->_payload); - __pyx_v_self->_payload = Py_None; - - /* "aiohttp/_http_parser.pyx":474 - * self._messages.append((msg, payload)) - * - * cdef _on_message_complete(self): # <<<<<<<<<<<<<< - * self._payload.feed_eof() - * self._payload = None - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":478 - * self._payload = None - * - * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< - * self._payload.begin_http_chunk_receiving() - * - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_chunk_header", 0); - - /* "aiohttp/_http_parser.pyx":479 - * - * cdef _on_chunk_header(self): - * self._payload.begin_http_chunk_receiving() # <<<<<<<<<<<<<< - * - * cdef _on_chunk_complete(self): - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_begin_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 479, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 479, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":478 - * self._payload = None - * - * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< - * self._payload.begin_http_chunk_receiving() - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":481 - * self._payload.begin_http_chunk_receiving() - * - * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< - * self._payload.end_http_chunk_receiving() - * - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_chunk_complete", 0); - - /* "aiohttp/_http_parser.pyx":482 - * - * cdef _on_chunk_complete(self): - * self._payload.end_http_chunk_receiving() # <<<<<<<<<<<<<< - * - * cdef object _on_status_complete(self): - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_end_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":481 - * self._payload.begin_http_chunk_receiving() - * - * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< - * self._payload.end_http_chunk_receiving() - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":484 - * self._payload.end_http_chunk_receiving() - * - * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< - * pass - * - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_on_status_complete", 0); - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":487 - * pass - * - * cdef inline http_version(self): # <<<<<<<<<<<<<< - * cdef cparser.http_parser* parser = self._cparser - * - */ - -static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - struct http_parser *__pyx_v_parser; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - struct http_parser *__pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("http_version", 0); - - /* "aiohttp/_http_parser.pyx":488 - * - * cdef inline http_version(self): - * cdef cparser.http_parser* parser = self._cparser # <<<<<<<<<<<<<< - * - * if parser.http_major == 1: - */ - __pyx_t_1 = __pyx_v_self->_cparser; - __pyx_v_parser = __pyx_t_1; - - /* "aiohttp/_http_parser.pyx":490 - * cdef cparser.http_parser* parser = self._cparser - * - * if parser.http_major == 1: # <<<<<<<<<<<<<< - * if parser.http_minor == 0: - * return HttpVersion10 - */ - __pyx_t_2 = ((__pyx_v_parser->http_major == 1) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":491 - * - * if parser.http_major == 1: - * if parser.http_minor == 0: # <<<<<<<<<<<<<< - * return HttpVersion10 - * elif parser.http_minor == 1: - */ - switch (__pyx_v_parser->http_minor) { - case 0: - - /* "aiohttp/_http_parser.pyx":492 - * if parser.http_major == 1: - * if parser.http_minor == 0: - * return HttpVersion10 # <<<<<<<<<<<<<< - * elif parser.http_minor == 1: - * return HttpVersion11 - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion10); - __pyx_r = __pyx_v_7aiohttp_12_http_parser_HttpVersion10; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":491 - * - * if parser.http_major == 1: - * if 
parser.http_minor == 0: # <<<<<<<<<<<<<< - * return HttpVersion10 - * elif parser.http_minor == 1: - */ - break; - case 1: - - /* "aiohttp/_http_parser.pyx":494 - * return HttpVersion10 - * elif parser.http_minor == 1: - * return HttpVersion11 # <<<<<<<<<<<<<< - * - * return HttpVersion(parser.http_major, parser.http_minor) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion11); - __pyx_r = __pyx_v_7aiohttp_12_http_parser_HttpVersion11; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":493 - * if parser.http_minor == 0: - * return HttpVersion10 - * elif parser.http_minor == 1: # <<<<<<<<<<<<<< - * return HttpVersion11 - * - */ - break; - default: break; - } - - /* "aiohttp/_http_parser.pyx":490 - * cdef cparser.http_parser* parser = self._cparser - * - * if parser.http_major == 1: # <<<<<<<<<<<<<< - * if parser.http_minor == 0: - * return HttpVersion10 - */ - } - - /* "aiohttp/_http_parser.pyx":496 - * return HttpVersion11 - * - * return HttpVersion(parser.http_major, parser.http_minor) # <<<<<<<<<<<<<< - * - * ### Public API ### - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_PyInt_From_unsigned_short(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_PyInt_From_unsigned_short(__pyx_v_parser->http_minor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion); - __pyx_t_6 = __pyx_v_7aiohttp_12_http_parser_HttpVersion; __pyx_t_7 = NULL; - __pyx_t_8 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_6)) { - PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_4, __pyx_t_5}; - __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { - PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_4, __pyx_t_5}; - __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } else - #endif - { - __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_9); - if (__pyx_t_7) { - __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; - } - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_5); - __pyx_t_4 = 0; - __pyx_t_5 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - } - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; 
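The Cython source quoted in the comments above shows how the deleted parser handles headers and versions: _on_header_field/_on_header_value append incoming byte chunks to bytearrays, _process_header flushes a finished name/value pair into the header multidict (tracking Content-Encoding on the side), and http_version() maps the parsed major/minor pair onto the shared HttpVersion10/HttpVersion11 constants. What follows is a minimal pure-Python sketch of that flow, not the vendored implementation; the plain dict, the .title() normalisation and the namedtuple stand in for CIMultiDict, find_header() and aiohttp's HttpVersion, which are simplifications assumed here for brevity.

from collections import namedtuple

HttpVersion = namedtuple("HttpVersion", ["major", "minor"])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


def http_version(major, minor):
    # Mirrors the quoted http_version(): return the shared 1.0/1.1 constants
    # for the common cases, otherwise build a fresh HttpVersion tuple.
    if major == 1:
        if minor == 0:
            return HttpVersion10
        if minor == 1:
            return HttpVersion11
    return HttpVersion(major, minor)


class HeaderAccumulator:
    def __init__(self):
        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False
        self.headers = {}        # stand-in for the CIMultiDict in the real parser
        self.raw_headers = []    # list of (bytes, bytes) pairs, as in the quoted source

    def on_header_field(self, chunk: bytes) -> None:
        # A field chunk arriving after a completed value means the previous
        # header is finished, so flush it first (cf. _on_header_field above).
        if self._has_value:
            self._process_header()
        self._raw_name += chunk

    def on_header_value(self, chunk: bytes) -> None:
        self._raw_value += chunk
        self._has_value = True

    def on_headers_complete(self) -> None:
        # The last pending header is flushed when the header block ends.
        self._process_header()

    def _process_header(self) -> None:
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)
            name = raw_name.decode("utf-8", "surrogateescape").title()
            value = raw_value.decode("utf-8", "surrogateescape")
            self.headers[name] = value
            self.raw_headers.append((raw_name, raw_value))
        self._raw_name.clear()
        self._raw_value.clear()
        self._has_value = False


acc = HeaderAccumulator()
acc.on_header_field(b"Content-")
acc.on_header_field(b"Encoding")      # field name split across callbacks, same header
acc.on_header_value(b"gzip")
acc.on_header_field(b"Host")          # a new field flushes the previous header
acc.on_header_value(b"example.org")
acc.on_headers_complete()
assert acc.headers == {"Content-Encoding": "gzip", "Host": "example.org"}
assert http_version(1, 1) is HttpVersion11

The asserts at the end double as a usage example: a field name split across two callbacks still yields a single header, and a new field arriving after a completed value flushes the previous pair.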
- - /* "aiohttp/_http_parser.pyx":487 - * pass - * - * cdef inline http_version(self): # <<<<<<<<<<<<<< - * cdef cparser.http_parser* parser = self._cparser - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.http_version", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":500 - * ### Public API ### - * - * def feed_eof(self): # <<<<<<<<<<<<<< - * cdef bytes desc - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5feed_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5feed_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("feed_eof (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4feed_eof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_v_desc = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("feed_eof", 0); - - /* "aiohttp/_http_parser.pyx":503 - * cdef bytes desc - * - * if self._payload is not None: # <<<<<<<<<<<<<< - * if self._cparser.flags & cparser.F_CHUNKED: - * raise TransferEncodingError( - */ - __pyx_t_1 = (__pyx_v_self->_payload != Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":504 - * - * if self._payload is not None: - * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< - * raise TransferEncodingError( - * "Not enough data for satisfy transfer length header.") - */ - __pyx_t_2 = ((__pyx_v_self->_cparser->flags & F_CHUNKED) != 0); - if (unlikely(__pyx_t_2)) { - - /* "aiohttp/_http_parser.pyx":505 - * if self._payload is not None: - * if self._cparser.flags & cparser.F_CHUNKED: - * raise TransferEncodingError( # <<<<<<<<<<<<<< - * "Not enough data for satisfy transfer length header.") - * elif self._cparser.flags & cparser.F_CONTENTLENGTH: - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_TransferEncodingError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 505, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_u_Not_enough_data_for_satisfy_tran) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_u_Not_enough_data_for_satisfy_tran); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 505, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(0, 505, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":504 - * - * if self._payload is not None: - * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< - * raise TransferEncodingError( - * "Not enough data for satisfy transfer length header.") - */ - } - - /* "aiohttp/_http_parser.pyx":507 - * raise TransferEncodingError( - * "Not enough data for satisfy transfer length header.") - * elif self._cparser.flags & cparser.F_CONTENTLENGTH: # <<<<<<<<<<<<<< - * raise ContentLengthError( - * "Not enough data for satisfy content length header.") - */ - __pyx_t_2 = ((__pyx_v_self->_cparser->flags & F_CONTENTLENGTH) != 0); - if (unlikely(__pyx_t_2)) { - - /* "aiohttp/_http_parser.pyx":508 - * "Not enough data for satisfy transfer length header.") - * elif self._cparser.flags & cparser.F_CONTENTLENGTH: - * raise ContentLengthError( # <<<<<<<<<<<<<< - * "Not enough data for satisfy content length header.") - * elif self._cparser.http_errno != cparser.HPE_OK: - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_ContentLengthError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 508, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_u_Not_enough_data_for_satisfy_cont) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_u_Not_enough_data_for_satisfy_cont); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 508, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(0, 508, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":507 - * raise TransferEncodingError( - * "Not enough data for satisfy transfer length header.") - * elif self._cparser.flags & cparser.F_CONTENTLENGTH: # <<<<<<<<<<<<<< - * raise ContentLengthError( - * "Not enough data for satisfy content length header.") - */ - } - - /* "aiohttp/_http_parser.pyx":510 - * raise ContentLengthError( - * "Not enough data for satisfy content length header.") - * elif self._cparser.http_errno != cparser.HPE_OK: # <<<<<<<<<<<<<< - * desc = cparser.http_errno_description( - * self._cparser.http_errno) - */ - __pyx_t_2 = ((__pyx_v_self->_cparser->http_errno != HPE_OK) != 0); - if (unlikely(__pyx_t_2)) { - - /* "aiohttp/_http_parser.pyx":511 - * "Not enough data for satisfy content length header.") - * elif self._cparser.http_errno != cparser.HPE_OK: - * desc = cparser.http_errno_description( # <<<<<<<<<<<<<< - * self._cparser.http_errno) - * raise PayloadEncodingError(desc.decode('latin-1')) - */ - __pyx_t_3 = __Pyx_PyBytes_FromString(http_errno_description(((enum http_errno)__pyx_v_self->_cparser->http_errno))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 511, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_desc = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":513 - * desc = cparser.http_errno_description( - * self._cparser.http_errno) - * raise PayloadEncodingError(desc.decode('latin-1')) # <<<<<<<<<<<<<< - * else: - * self._payload.feed_eof() - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_PayloadEncodingError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(0, 513, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":510 - * raise ContentLengthError( - * "Not enough data for satisfy content length header.") - * elif self._cparser.http_errno != cparser.HPE_OK: # <<<<<<<<<<<<<< - * desc = cparser.http_errno_description( - * self._cparser.http_errno) - */ - } - - /* "aiohttp/_http_parser.pyx":515 - * raise PayloadEncodingError(desc.decode('latin-1')) - * else: - * self._payload.feed_eof() # <<<<<<<<<<<<<< - * elif self._started: - * self._on_headers_complete() - */ - /*else*/ { - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 515, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 515, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - - /* "aiohttp/_http_parser.pyx":503 - * cdef bytes desc - * - * if self._payload is not None: # <<<<<<<<<<<<<< - * if self._cparser.flags & cparser.F_CHUNKED: - * raise TransferEncodingError( - */ - goto __pyx_L3; - } - - /* "aiohttp/_http_parser.pyx":516 - * else: - * self._payload.feed_eof() - * elif self._started: # <<<<<<<<<<<<<< - * self._on_headers_complete() - * if self._messages: - */ - __pyx_t_2 = (__pyx_v_self->_started != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":517 - * self._payload.feed_eof() - * elif self._started: - * self._on_headers_complete() # <<<<<<<<<<<<<< - * if self._messages: - * return self._messages[-1][0] - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_on_headers_complete(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 517, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":518 - * elif self._started: - * self._on_headers_complete() - * if self._messages: # <<<<<<<<<<<<<< - * return self._messages[-1][0] - * - */ - __pyx_t_2 = (__pyx_v_self->_messages != Py_None)&&(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":519 - * self._on_headers_complete() - * if self._messages: - * return self._messages[-1][0] # <<<<<<<<<<<<<< - * - * def feed_data(self, data): - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_messages == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 519, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_GetItemInt_List(__pyx_v_self->_messages, -1L, long, 1, __Pyx_PyInt_From_long, 1, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 519, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_3, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 519, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":518 - * elif self._started: - * self._on_headers_complete() - * if self._messages: # <<<<<<<<<<<<<< - * return self._messages[-1][0] - * - */ - } - - /* "aiohttp/_http_parser.pyx":516 - * else: - * self._payload.feed_eof() - * elif self._started: # <<<<<<<<<<<<<< - * self._on_headers_complete() - * if self._messages: - */ - } - __pyx_L3:; - - /* "aiohttp/_http_parser.pyx":500 - * ### Public API ### - * - * def feed_eof(self): # <<<<<<<<<<<<<< - * cdef bytes desc - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.feed_eof", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_desc); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":521 - * return self._messages[-1][0] - * - * def feed_data(self, data): # <<<<<<<<<<<<<< - * cdef: - * size_t data_len - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("feed_data (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v_data)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_data(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data) { - size_t __pyx_v_data_len; - size_t __pyx_v_nb; - PyObject *__pyx_v_ex = NULL; - PyObject *__pyx_v_messages = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("feed_data", 0); - - /* "aiohttp/_http_parser.pyx":526 - * size_t nb - * - * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< - * data_len = self.py_buf.len - * - */ - __pyx_t_1 = PyObject_GetBuffer(__pyx_v_data, (&__pyx_v_self->py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 526, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":527 - * - * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) - * data_len = self.py_buf.len # <<<<<<<<<<<<<< - * - * nb = cparser.http_parser_execute( - */ - __pyx_v_data_len = ((size_t)__pyx_v_self->py_buf.len); - - /* "aiohttp/_http_parser.pyx":529 - * data_len = self.py_buf.len - * - * nb = cparser.http_parser_execute( # <<<<<<<<<<<<<< - * self._cparser, - * self._csettings, - */ - __pyx_v_nb = http_parser_execute(__pyx_v_self->_cparser, __pyx_v_self->_csettings, ((char *)__pyx_v_self->py_buf.buf), 
__pyx_v_data_len); - - /* "aiohttp/_http_parser.pyx":535 - * data_len) - * - * PyBuffer_Release(&self.py_buf) # <<<<<<<<<<<<<< - * - * if (self._cparser.http_errno != cparser.HPE_OK): - */ - PyBuffer_Release((&__pyx_v_self->py_buf)); - - /* "aiohttp/_http_parser.pyx":537 - * PyBuffer_Release(&self.py_buf) - * - * if (self._cparser.http_errno != cparser.HPE_OK): # <<<<<<<<<<<<<< - * if self._payload_error == 0: - * if self._last_error is not None: - */ - __pyx_t_2 = ((__pyx_v_self->_cparser->http_errno != HPE_OK) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":538 - * - * if (self._cparser.http_errno != cparser.HPE_OK): - * if self._payload_error == 0: # <<<<<<<<<<<<<< - * if self._last_error is not None: - * ex = self._last_error - */ - __pyx_t_2 = ((__pyx_v_self->_payload_error == 0) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":539 - * if (self._cparser.http_errno != cparser.HPE_OK): - * if self._payload_error == 0: - * if self._last_error is not None: # <<<<<<<<<<<<<< - * ex = self._last_error - * self._last_error = None - */ - __pyx_t_2 = (__pyx_v_self->_last_error != Py_None); - __pyx_t_3 = (__pyx_t_2 != 0); - if (__pyx_t_3) { - - /* "aiohttp/_http_parser.pyx":540 - * if self._payload_error == 0: - * if self._last_error is not None: - * ex = self._last_error # <<<<<<<<<<<<<< - * self._last_error = None - * else: - */ - __pyx_t_4 = __pyx_v_self->_last_error; - __Pyx_INCREF(__pyx_t_4); - __pyx_v_ex = __pyx_t_4; - __pyx_t_4 = 0; - - /* "aiohttp/_http_parser.pyx":541 - * if self._last_error is not None: - * ex = self._last_error - * self._last_error = None # <<<<<<<<<<<<<< - * else: - * ex = parser_error_from_errno( - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_last_error); - __Pyx_DECREF(__pyx_v_self->_last_error); - __pyx_v_self->_last_error = Py_None; - - /* "aiohttp/_http_parser.pyx":539 - * if (self._cparser.http_errno != cparser.HPE_OK): - * if self._payload_error == 0: - * if self._last_error is not None: # <<<<<<<<<<<<<< - * ex = self._last_error - * self._last_error = None - */ - goto __pyx_L5; - } - - /* "aiohttp/_http_parser.pyx":543 - * self._last_error = None - * else: - * ex = parser_error_from_errno( # <<<<<<<<<<<<<< - * self._cparser.http_errno) - * self._payload = None - */ - /*else*/ { - - /* "aiohttp/_http_parser.pyx":544 - * else: - * ex = parser_error_from_errno( - * self._cparser.http_errno) # <<<<<<<<<<<<<< - * self._payload = None - * raise ex - */ - __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(((enum http_errno)__pyx_v_self->_cparser->http_errno)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 543, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_v_ex = __pyx_t_4; - __pyx_t_4 = 0; - } - __pyx_L5:; - - /* "aiohttp/_http_parser.pyx":545 - * ex = parser_error_from_errno( - * self._cparser.http_errno) - * self._payload = None # <<<<<<<<<<<<<< - * raise ex - * - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_self->_payload); - __Pyx_DECREF(__pyx_v_self->_payload); - __pyx_v_self->_payload = Py_None; - - /* "aiohttp/_http_parser.pyx":546 - * self._cparser.http_errno) - * self._payload = None - * raise ex # <<<<<<<<<<<<<< - * - * if self._messages: - */ - __Pyx_Raise(__pyx_v_ex, 0, 0, 0); - __PYX_ERR(0, 546, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":538 - * - * if (self._cparser.http_errno != cparser.HPE_OK): - * if self._payload_error == 0: # <<<<<<<<<<<<<< - * if self._last_error is not None: - * ex = self._last_error - */ - } - - /* 
"aiohttp/_http_parser.pyx":537 - * PyBuffer_Release(&self.py_buf) - * - * if (self._cparser.http_errno != cparser.HPE_OK): # <<<<<<<<<<<<<< - * if self._payload_error == 0: - * if self._last_error is not None: - */ - } - - /* "aiohttp/_http_parser.pyx":548 - * raise ex - * - * if self._messages: # <<<<<<<<<<<<<< - * messages = self._messages - * self._messages = [] - */ - __pyx_t_3 = (__pyx_v_self->_messages != Py_None)&&(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); - if (__pyx_t_3) { - - /* "aiohttp/_http_parser.pyx":549 - * - * if self._messages: - * messages = self._messages # <<<<<<<<<<<<<< - * self._messages = [] - * else: - */ - __pyx_t_4 = __pyx_v_self->_messages; - __Pyx_INCREF(__pyx_t_4); - __pyx_v_messages = __pyx_t_4; - __pyx_t_4 = 0; - - /* "aiohttp/_http_parser.pyx":550 - * if self._messages: - * messages = self._messages - * self._messages = [] # <<<<<<<<<<<<<< - * else: - * messages = () - */ - __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 550, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_GOTREF(__pyx_v_self->_messages); - __Pyx_DECREF(__pyx_v_self->_messages); - __pyx_v_self->_messages = ((PyObject*)__pyx_t_4); - __pyx_t_4 = 0; - - /* "aiohttp/_http_parser.pyx":548 - * raise ex - * - * if self._messages: # <<<<<<<<<<<<<< - * messages = self._messages - * self._messages = [] - */ - goto __pyx_L6; - } - - /* "aiohttp/_http_parser.pyx":552 - * self._messages = [] - * else: - * messages = () # <<<<<<<<<<<<<< - * - * if self._upgraded: - */ - /*else*/ { - __Pyx_INCREF(__pyx_empty_tuple); - __pyx_v_messages = __pyx_empty_tuple; - } - __pyx_L6:; - - /* "aiohttp/_http_parser.pyx":554 - * messages = () - * - * if self._upgraded: # <<<<<<<<<<<<<< - * return messages, True, data[nb:] - * else: - */ - __pyx_t_3 = (__pyx_v_self->_upgraded != 0); - if (__pyx_t_3) { - - /* "aiohttp/_http_parser.pyx":555 - * - * if self._upgraded: - * return messages, True, data[nb:] # <<<<<<<<<<<<<< - * else: - * return messages, False, b'' - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, __pyx_v_nb, 0, NULL, NULL, NULL, 1, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 555, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 555, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(__pyx_v_messages); - __Pyx_GIVEREF(__pyx_v_messages); - PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages); - __Pyx_INCREF(Py_True); - __Pyx_GIVEREF(Py_True); - PyTuple_SET_ITEM(__pyx_t_5, 1, Py_True); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4); - __pyx_t_4 = 0; - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":554 - * messages = () - * - * if self._upgraded: # <<<<<<<<<<<<<< - * return messages, True, data[nb:] - * else: - */ - } - - /* "aiohttp/_http_parser.pyx":557 - * return messages, True, data[nb:] - * else: - * return messages, False, b'' # <<<<<<<<<<<<<< - * - * def set_upgraded(self, val): - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 557, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(__pyx_v_messages); - __Pyx_GIVEREF(__pyx_v_messages); - PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages); - __Pyx_INCREF(Py_False); - __Pyx_GIVEREF(Py_False); - PyTuple_SET_ITEM(__pyx_t_5, 1, Py_False); - __Pyx_INCREF(__pyx_kp_b__4); - __Pyx_GIVEREF(__pyx_kp_b__4); - PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_b__4); - __pyx_r = __pyx_t_5; - 
__pyx_t_5 = 0; - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":521 - * return self._messages[-1][0] - * - * def feed_data(self, data): # <<<<<<<<<<<<<< - * cdef: - * size_t data_len - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.feed_data", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_ex); - __Pyx_XDECREF(__pyx_v_messages); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":559 - * return messages, False, b'' - * - * def set_upgraded(self, val): # <<<<<<<<<<<<<< - * self._upgraded = val - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9set_upgraded(PyObject *__pyx_v_self, PyObject *__pyx_v_val); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9set_upgraded(PyObject *__pyx_v_self, PyObject *__pyx_v_val) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("set_upgraded (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_8set_upgraded(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v_val)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_8set_upgraded(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("set_upgraded", 0); - - /* "aiohttp/_http_parser.pyx":560 - * - * def set_upgraded(self, val): - * self._upgraded = val # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_val); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 560, __pyx_L1_error) - __pyx_v_self->_upgraded = __pyx_t_1; - - /* "aiohttp/_http_parser.pyx":559 - * return messages, False, b'' - * - * def set_upgraded(self, val): # <<<<<<<<<<<<<< - * self._upgraded = val - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.set_upgraded", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__reduce_cython__(CYTHON_UNUSED 
struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_12__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_12__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to 
non-trivial __cinit__") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":565 - * cdef class HttpRequestParser(HttpParser): - * - * def __init__(self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_protocol = 0; - PyObject *__pyx_v_loop = 0; - int __pyx_v_limit; - PyObject *__pyx_v_timer = 0; - size_t __pyx_v_max_line_size; - size_t __pyx_v_max_headers; - size_t __pyx_v_max_field_size; - PyObject *__pyx_v_payload_exception = 0; - int __pyx_v_response_with_body; - int __pyx_v_read_until_eof; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_limit,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,0}; - PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; - values[3] = ((PyObject *)Py_None); - - /* "aiohttp/_http_parser.pyx":567 - * def __init__(self, protocol, loop, int limit, timer=None, - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< - * bint response_with_body=True, bint read_until_eof=False, - * ): - */ - values[7] = ((PyObject *)Py_None); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_loop)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 10, 1); __PYX_ERR(0, 565, __pyx_L3_error) - } - 
CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_limit)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 10, 2); __PYX_ERR(0, 565, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_timer); - if (value) { values[3] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 4: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_line_size); - if (value) { values[4] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 5: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_headers); - if (value) { values[5] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 6: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_field_size); - if (value) { values[6] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 7: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_payload_exception); - if (value) { values[7] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 8: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_response_with_body); - if (value) { values[8] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 9: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_read_until_eof); - if (value) { values[9] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 565, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_protocol = values[0]; - __pyx_v_loop = values[1]; - __pyx_v_limit = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_limit == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 565, __pyx_L3_error) - __pyx_v_timer = values[3]; - if (values[4]) { - __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 566, __pyx_L3_error) - } else { - __pyx_v_max_line_size = ((size_t)0x1FFE); - } - if (values[5]) { - __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 566, __pyx_L3_error) - } else { - __pyx_v_max_headers = ((size_t)0x8000); - } - if (values[6]) { - __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[6]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 567, __pyx_L3_error) - } else { - __pyx_v_max_field_size = ((size_t)0x1FFE); - } - __pyx_v_payload_exception = values[7]; - if (values[8]) { - 
__pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 568, __pyx_L3_error) - } else { - - /* "aiohttp/_http_parser.pyx":568 - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< - * ): - * self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, - */ - __pyx_v_response_with_body = ((int)1); - } - if (values[9]) { - __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 568, __pyx_L3_error) - } else { - __pyx_v_read_until_eof = ((int)0); - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 565, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof); - - /* "aiohttp/_http_parser.pyx":565 - * cdef class HttpRequestParser(HttpParser): - * - * def __init__(self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, int __pyx_v_limit, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, int __pyx_v_response_with_body, int __pyx_v_read_until_eof) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "aiohttp/_http_parser.pyx":570 - * bint response_with_body=True, bint read_until_eof=False, - * ): - * self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, # <<<<<<<<<<<<<< - * max_line_size, max_headers, max_field_size, - * payload_exception, response_with_body, read_until_eof) - */ - __pyx_t_2.__pyx_n = 7; - __pyx_t_2.timer = __pyx_v_timer; - __pyx_t_2.max_line_size = __pyx_v_max_line_size; - __pyx_t_2.max_headers = __pyx_v_max_headers; - __pyx_t_2.max_field_size = __pyx_v_max_field_size; - __pyx_t_2.payload_exception = __pyx_v_payload_exception; - __pyx_t_2.response_with_body = __pyx_v_response_with_body; - __pyx_t_2.read_until_eof = __pyx_v_read_until_eof; - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_REQUEST, 
__pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 570, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":565 - * cdef class HttpRequestParser(HttpParser): - * - * def __init__(self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":574 - * payload_exception, response_with_body, read_until_eof) - * - * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< - * cdef Py_buffer py_buf - * if not self._buf: - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_17HttpRequestParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self) { - Py_buffer __pyx_v_py_buf; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_t_7; - char const *__pyx_t_8; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_status_complete", 0); - - /* "aiohttp/_http_parser.pyx":576 - * cdef object _on_status_complete(self): - * cdef Py_buffer py_buf - * if not self._buf: # <<<<<<<<<<<<<< - * return - * self._path = self._buf.decode('utf-8', 'surrogateescape') - */ - __pyx_t_1 = (__pyx_v_self->__pyx_base._buf != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->__pyx_base._buf) != 0); - __pyx_t_2 = ((!__pyx_t_1) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":577 - * cdef Py_buffer py_buf - * if not self._buf: - * return # <<<<<<<<<<<<<< - * self._path = self._buf.decode('utf-8', 'surrogateescape') - * if self._cparser.method == 5: # CONNECT - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":576 - * cdef object _on_status_complete(self): - * cdef Py_buffer py_buf - * if not self._buf: # <<<<<<<<<<<<<< - * return - * self._path = self._buf.decode('utf-8', 'surrogateescape') - */ - } - - /* "aiohttp/_http_parser.pyx":578 - * if not self._buf: - * return - * self._path = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * if self._cparser.method == 5: # CONNECT - * self._url = URL(self._path) - */ - if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); - __PYX_ERR(0, 578, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 578, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->__pyx_base._path); - __Pyx_DECREF(__pyx_v_self->__pyx_base._path); - __pyx_v_self->__pyx_base._path = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 
0; - - /* "aiohttp/_http_parser.pyx":579 - * return - * self._path = self._buf.decode('utf-8', 'surrogateescape') - * if self._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< - * self._url = URL(self._path) - * else: - */ - __pyx_t_2 = ((__pyx_v_self->__pyx_base._cparser->method == 5) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_parser.pyx":580 - * self._path = self._buf.decode('utf-8', 'surrogateescape') - * if self._cparser.method == 5: # CONNECT - * self._url = URL(self._path) # <<<<<<<<<<<<<< - * else: - * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) - */ - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_URL); - __pyx_t_4 = __pyx_v_7aiohttp_12_http_parser_URL; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_v_self->__pyx_base._path) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_self->__pyx_base._path); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 580, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->__pyx_base._url); - __Pyx_DECREF(__pyx_v_self->__pyx_base._url); - __pyx_v_self->__pyx_base._url = __pyx_t_3; - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":579 - * return - * self._path = self._buf.decode('utf-8', 'surrogateescape') - * if self._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< - * self._url = URL(self._path) - * else: - */ - goto __pyx_L4; - } - - /* "aiohttp/_http_parser.pyx":582 - * self._url = URL(self._path) - * else: - * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< - * try: - * self._url = _parse_url(py_buf.buf, - */ - /*else*/ { - __pyx_t_3 = __pyx_v_self->__pyx_base._buf; - __Pyx_INCREF(__pyx_t_3); - __pyx_t_6 = PyObject_GetBuffer(__pyx_t_3, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 582, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":583 - * else: - * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) - * try: # <<<<<<<<<<<<<< - * self._url = _parse_url(py_buf.buf, - * py_buf.len) - */ - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":584 - * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) - * try: - * self._url = _parse_url(py_buf.buf, # <<<<<<<<<<<<<< - * py_buf.len) - * finally: - */ - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser__parse_url(((char *)__pyx_v_py_buf.buf), __pyx_v_py_buf.len); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 584, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->__pyx_base._url); - __Pyx_DECREF(__pyx_v_self->__pyx_base._url); - __pyx_v_self->__pyx_base._url = __pyx_t_3; - __pyx_t_3 = 0; - } - - /* "aiohttp/_http_parser.pyx":587 - * py_buf.len) - * finally: - * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< - * PyByteArray_Resize(self._buf, 0) - * - */ - /*finally:*/ { - /*normal exit:*/{ - PyBuffer_Release((&__pyx_v_py_buf)); - goto __pyx_L7; - } - __pyx_L6_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - 
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0)) __Pyx_ErrFetch(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_10); - __Pyx_XGOTREF(__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_12); - __Pyx_XGOTREF(__pyx_t_13); - __Pyx_XGOTREF(__pyx_t_14); - __pyx_t_6 = __pyx_lineno; __pyx_t_7 = __pyx_clineno; __pyx_t_8 = __pyx_filename; - { - PyBuffer_Release((&__pyx_v_py_buf)); - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_12); - __Pyx_XGIVEREF(__pyx_t_13); - __Pyx_XGIVEREF(__pyx_t_14); - __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_13, __pyx_t_14); - } - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_XGIVEREF(__pyx_t_10); - __Pyx_XGIVEREF(__pyx_t_11); - __Pyx_ErrRestore(__pyx_t_9, __pyx_t_10, __pyx_t_11); - __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; - __pyx_lineno = __pyx_t_6; __pyx_clineno = __pyx_t_7; __pyx_filename = __pyx_t_8; - goto __pyx_L1_error; - } - __pyx_L7:; - } - } - __pyx_L4:; - - /* "aiohttp/_http_parser.pyx":588 - * finally: - * PyBuffer_Release(&py_buf) - * PyByteArray_Resize(self._buf, 0) # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_3 = __pyx_v_self->__pyx_base._buf; - __Pyx_INCREF(__pyx_t_3); - __pyx_t_7 = PyByteArray_Resize(__pyx_t_3, 0); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 588, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":574 - * payload_exception, response_with_body, read_until_eof) - * - * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< - * cdef Py_buffer py_buf - * if not self._buf: - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser._on_status_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_2__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree 
fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_4__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_17HttpRequestParser_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - 
__Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":593 - * cdef class HttpResponseParser(HttpParser): - * - * def __init__(self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - -/* Python wrapper */ -static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_protocol = 0; - PyObject *__pyx_v_loop = 0; - int __pyx_v_limit; - PyObject *__pyx_v_timer = 0; - size_t __pyx_v_max_line_size; - size_t __pyx_v_max_headers; - size_t __pyx_v_max_field_size; - PyObject *__pyx_v_payload_exception = 0; - int __pyx_v_response_with_body; - int __pyx_v_read_until_eof; - int __pyx_v_auto_decompress; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_limit,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,&__pyx_n_s_auto_decompress,0}; - PyObject* values[11] = {0,0,0,0,0,0,0,0,0,0,0}; - values[3] = ((PyObject *)Py_None); - - /* "aiohttp/_http_parser.pyx":595 - * def __init__(self, protocol, loop, int limit, timer=None, - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< - * bint response_with_body=True, bint read_until_eof=False, - * bint auto_decompress=True - */ - values[7] = ((PyObject *)Py_None); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_loop)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 1); __PYX_ERR(0, 593, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = 
__Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_limit)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, 2); __PYX_ERR(0, 593, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_timer); - if (value) { values[3] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 4: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_line_size); - if (value) { values[4] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 5: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_headers); - if (value) { values[5] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 6: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_field_size); - if (value) { values[6] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 7: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_payload_exception); - if (value) { values[7] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 8: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_response_with_body); - if (value) { values[8] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 9: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_read_until_eof); - if (value) { values[9] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 10: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_auto_decompress); - if (value) { values[10] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 593, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_protocol = values[0]; - __pyx_v_loop = values[1]; - __pyx_v_limit = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_limit == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 593, __pyx_L3_error) - __pyx_v_timer = values[3]; - if (values[4]) { - __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 594, __pyx_L3_error) - } else { - __pyx_v_max_line_size = ((size_t)0x1FFE); - } - if (values[5]) { - __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 594, __pyx_L3_error) - } else { - __pyx_v_max_headers = ((size_t)0x8000); - } - if (values[6]) { - __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[6]); if 
(unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 595, __pyx_L3_error) - } else { - __pyx_v_max_field_size = ((size_t)0x1FFE); - } - __pyx_v_payload_exception = values[7]; - if (values[8]) { - __pyx_v_response_with_body = __Pyx_PyObject_IsTrue(values[8]); if (unlikely((__pyx_v_response_with_body == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 596, __pyx_L3_error) - } else { - - /* "aiohttp/_http_parser.pyx":596 - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - * bint response_with_body=True, bint read_until_eof=False, # <<<<<<<<<<<<<< - * bint auto_decompress=True - * ): - */ - __pyx_v_response_with_body = ((int)1); - } - if (values[9]) { - __pyx_v_read_until_eof = __Pyx_PyObject_IsTrue(values[9]); if (unlikely((__pyx_v_read_until_eof == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 596, __pyx_L3_error) - } else { - __pyx_v_read_until_eof = ((int)0); - } - if (values[10]) { - __pyx_v_auto_decompress = __Pyx_PyObject_IsTrue(values[10]); if (unlikely((__pyx_v_auto_decompress == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 597, __pyx_L3_error) - } else { - - /* "aiohttp/_http_parser.pyx":597 - * size_t max_field_size=8190, payload_exception=None, - * bint response_with_body=True, bint read_until_eof=False, - * bint auto_decompress=True # <<<<<<<<<<<<<< - * ): - * self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, - */ - __pyx_v_auto_decompress = ((int)1); - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 3, 11, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 593, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof, __pyx_v_auto_decompress); - - /* "aiohttp/_http_parser.pyx":593 - * cdef class HttpResponseParser(HttpParser): - * - * def __init__(self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, int __pyx_v_limit, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, int __pyx_v_response_with_body, int __pyx_v_read_until_eof, int __pyx_v_auto_decompress) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "aiohttp/_http_parser.pyx":599 - * bint auto_decompress=True - * ): - * self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, # 
<<<<<<<<<<<<<< - * max_line_size, max_headers, max_field_size, - * payload_exception, response_with_body, read_until_eof, - */ - __pyx_t_2.__pyx_n = 8; - __pyx_t_2.timer = __pyx_v_timer; - __pyx_t_2.max_line_size = __pyx_v_max_line_size; - __pyx_t_2.max_headers = __pyx_v_max_headers; - __pyx_t_2.max_field_size = __pyx_v_max_field_size; - __pyx_t_2.payload_exception = __pyx_v_payload_exception; - __pyx_t_2.response_with_body = __pyx_v_response_with_body; - __pyx_t_2.read_until_eof = __pyx_v_read_until_eof; - __pyx_t_2.auto_decompress = __pyx_v_auto_decompress; - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_RESPONSE, __pyx_v_protocol, __pyx_v_loop, __pyx_v_limit, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 599, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":593 - * cdef class HttpResponseParser(HttpParser): - * - * def __init__(self, protocol, loop, int limit, timer=None, # <<<<<<<<<<<<<< - * size_t max_line_size=8190, size_t max_headers=32768, - * size_t max_field_size=8190, payload_exception=None, - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":604 - * auto_decompress) - * - * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< - * if self._buf: - * self._reason = self._buf.decode('utf-8', 'surrogateescape') - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_18HttpResponseParser__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_on_status_complete", 0); - - /* "aiohttp/_http_parser.pyx":605 - * - * cdef object _on_status_complete(self): - * if self._buf: # <<<<<<<<<<<<<< - * self._reason = self._buf.decode('utf-8', 'surrogateescape') - * PyByteArray_Resize(self._buf, 0) - */ - __pyx_t_1 = (__pyx_v_self->__pyx_base._buf != Py_None)&&(PyByteArray_GET_SIZE(__pyx_v_self->__pyx_base._buf) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":606 - * cdef object _on_status_complete(self): - * if self._buf: - * self._reason = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * PyByteArray_Resize(self._buf, 0) - * else: - */ - if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); - __PYX_ERR(0, 606, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 606, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->__pyx_base._reason); - __Pyx_DECREF(__pyx_v_self->__pyx_base._reason); - __pyx_v_self->__pyx_base._reason = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":607 - * if self._buf: - * self._reason = self._buf.decode('utf-8', 
'surrogateescape') - * PyByteArray_Resize(self._buf, 0) # <<<<<<<<<<<<<< - * else: - * self._reason = self._reason or '' - */ - __pyx_t_2 = __pyx_v_self->__pyx_base._buf; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_3 = PyByteArray_Resize(__pyx_t_2, 0); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 607, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_parser.pyx":605 - * - * cdef object _on_status_complete(self): - * if self._buf: # <<<<<<<<<<<<<< - * self._reason = self._buf.decode('utf-8', 'surrogateescape') - * PyByteArray_Resize(self._buf, 0) - */ - goto __pyx_L3; - } - - /* "aiohttp/_http_parser.pyx":609 - * PyByteArray_Resize(self._buf, 0) - * else: - * self._reason = self._reason or '' # <<<<<<<<<<<<<< - * - * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: - */ - /*else*/ { - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->__pyx_base._reason); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 609, __pyx_L1_error) - if (!__pyx_t_1) { - } else { - __Pyx_INCREF(__pyx_v_self->__pyx_base._reason); - __pyx_t_2 = __pyx_v_self->__pyx_base._reason; - goto __pyx_L4_bool_binop_done; - } - __Pyx_INCREF(__pyx_kp_u__4); - __pyx_t_2 = __pyx_kp_u__4; - __pyx_L4_bool_binop_done:; - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->__pyx_base._reason); - __Pyx_DECREF(__pyx_v_self->__pyx_base._reason); - __pyx_v_self->__pyx_base._reason = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - } - __pyx_L3:; - - /* "aiohttp/_http_parser.pyx":604 - * auto_decompress) - * - * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< - * if self._buf: - * self._reason = self._buf.decode('utf-8', 'surrogateescape') - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser._on_status_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_2__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * 
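[Editorial aid, not part of the patch.] The generated C above is hard to read on its own, but the Cython source it was compiled from is quoted in the interleaved /* ... */ comments (aiohttp/_http_parser.pyx lines 593-609). Below is a plain-Python sketch of what that quoted source does; the stub base class and the HTTP_RESPONSE constant are local stand-ins so the sketch runs, not the vendored code.

    # Sketch of the quoted Cython source for HttpResponseParser (pyx 593-609).
    # _HttpParserStub and HTTP_RESPONSE are local stand-ins, not aiohttp's own.
    HTTP_RESPONSE = 1  # placeholder for cparser.HTTP_RESPONSE

    class _HttpParserStub:
        def _init(self, mode, protocol, loop, limit, timer=None, **kwargs):
            self._buf = bytearray()
            self._reason = None

    class HttpResponseParser(_HttpParserStub):
        def __init__(self, protocol, loop, limit, timer=None,
                     max_line_size=8190, max_headers=32768,
                     max_field_size=8190, payload_exception=None,
                     response_with_body=True, read_until_eof=False,
                     auto_decompress=True):
            # Everything is forwarded to the shared HttpParser._init, with the
            # parser tagged as a response parser (cparser.HTTP_RESPONSE).
            self._init(HTTP_RESPONSE, protocol, loop, limit, timer=timer,
                       max_line_size=max_line_size, max_headers=max_headers,
                       max_field_size=max_field_size,
                       payload_exception=payload_exception,
                       response_with_body=response_with_body,
                       read_until_eof=read_until_eof,
                       auto_decompress=auto_decompress)

        def _on_status_complete(self):
            # Status line finished: turn the accumulated bytes into the reason
            # phrase and empty the buffer (PyByteArray_Resize(self._buf, 0) in
            # the C code); otherwise default the reason to ''.
            if self._buf:
                self._reason = self._buf.decode('utf-8', 'surrogateescape')
                del self._buf[:]
            else:
                self._reason = self._reason or ''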
raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_4__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpResponseParser_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":611 - * self._reason = self._reason or '' - * - * cdef int 
cb_on_message_begin(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin(struct http_parser *__pyx_v_parser) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_message_begin", 0); - - /* "aiohttp/_http_parser.pyx":612 - * - * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * - * pyparser._started = True - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":614 - * cdef HttpParser pyparser = parser.data - * - * pyparser._started = True # <<<<<<<<<<<<<< - * pyparser._headers = CIMultiDict() - * pyparser._raw_headers = [] - */ - __pyx_v_pyparser->_started = 1; - - /* "aiohttp/_http_parser.pyx":615 - * - * pyparser._started = True - * pyparser._headers = CIMultiDict() # <<<<<<<<<<<<<< - * pyparser._raw_headers = [] - * PyByteArray_Resize(pyparser._buf, 0) - */ - __Pyx_INCREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDict); - __pyx_t_2 = __pyx_v_7aiohttp_12_http_parser_CIMultiDict; __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 615, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_pyparser->_headers); - __Pyx_DECREF(__pyx_v_pyparser->_headers); - __pyx_v_pyparser->_headers = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":616 - * pyparser._started = True - * pyparser._headers = CIMultiDict() - * pyparser._raw_headers = [] # <<<<<<<<<<<<<< - * PyByteArray_Resize(pyparser._buf, 0) - * pyparser._path = None - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 616, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_pyparser->_raw_headers); - __Pyx_DECREF(__pyx_v_pyparser->_raw_headers); - __pyx_v_pyparser->_raw_headers = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":617 - * pyparser._headers = CIMultiDict() - * pyparser._raw_headers = [] - * PyByteArray_Resize(pyparser._buf, 0) # <<<<<<<<<<<<<< - * pyparser._path = None - * pyparser._reason = None - */ - __pyx_t_1 = __pyx_v_pyparser->_buf; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_4 = PyByteArray_Resize(__pyx_t_1, 0); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 617, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":618 - * pyparser._raw_headers = [] - * PyByteArray_Resize(pyparser._buf, 0) - * pyparser._path = None # <<<<<<<<<<<<<< - * pyparser._reason = None - * return 0 - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_pyparser->_path); - __Pyx_DECREF(__pyx_v_pyparser->_path); - __pyx_v_pyparser->_path = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":619 - * PyByteArray_Resize(pyparser._buf, 0) - * pyparser._path = None - * pyparser._reason = None # <<<<<<<<<<<<<< - * return 0 - * - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_pyparser->_reason); - __Pyx_DECREF(__pyx_v_pyparser->_reason); - __pyx_v_pyparser->_reason = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":620 - * pyparser._path = None - * pyparser._reason = None - * return 0 # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":611 - * self._reason = self._reason or '' - * - * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":623 - * - * - * cdef int cb_on_url(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_url(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - PyObject *__pyx_v_ex = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; 
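[Editorial aid, not part of the patch.] The comments above likewise quote cb_on_message_begin (pyx lines 611-620), the http_parser C callback that resets per-message state; in the real extension, parser.data points back at the Python HttpParser object. A hedged plain-Python equivalent, using an ordinary dict where aiohttp uses multidict.CIMultiDict:

    # Sketch of cb_on_message_begin as quoted above (pyx 611-620); the parser
    # object is passed in directly instead of being fetched from parser.data.
    def cb_on_message_begin(pyparser) -> int:
        # Reset per-message state before parsing a new request/response.
        pyparser._started = True
        pyparser._headers = {}        # aiohttp builds a CIMultiDict() here
        pyparser._raw_headers = []
        del pyparser._buf[:]          # PyByteArray_Resize(pyparser._buf, 0)
        pyparser._path = None
        pyparser._reason = None
        return 0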
- int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - int __pyx_t_10; - PyObject *__pyx_t_11 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_url", 0); - - /* "aiohttp/_http_parser.pyx":625 - * cdef int cb_on_url(cparser.http_parser* parser, - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * try: - * if length > pyparser._max_line_size: - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":626 - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * if length > pyparser._max_line_size: - * raise LineTooLong( - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":627 - * cdef HttpParser pyparser = parser.data - * try: - * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) - */ - __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_line_size) != 0); - if (unlikely(__pyx_t_5)) { - - /* "aiohttp/_http_parser.pyx":628 - * try: - * if length > pyparser._max_line_size: - * raise LineTooLong( # <<<<<<<<<<<<<< - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) - */ - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 628, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_6); - - /* "aiohttp/_http_parser.pyx":629 - * if length > pyparser._max_line_size: - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) # <<<<<<<<<<<<<< - * extend(pyparser._buf, at, length) - * except BaseException as ex: - */ - __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_line_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 629, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 629, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_9 = NULL; - __pyx_t_10 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_9)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_9); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_10 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_6)) { - PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 628, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { - PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, 
__pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 628, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - } else - #endif - { - __pyx_t_11 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 628, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_11); - if (__pyx_t_9) { - __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_9); __pyx_t_9 = NULL; - } - __Pyx_INCREF(__pyx_kp_u_Status_line_is_too_long); - __Pyx_GIVEREF(__pyx_kp_u_Status_line_is_too_long); - PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_kp_u_Status_line_is_too_long); - __Pyx_GIVEREF(__pyx_t_7); - PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, __pyx_t_7); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_11, 2+__pyx_t_10, __pyx_t_8); - __pyx_t_7 = 0; - __pyx_t_8 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_11, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 628, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - } - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 628, __pyx_L3_error) - - /* "aiohttp/_http_parser.pyx":627 - * cdef HttpParser pyparser = parser.data - * try: - * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) - */ - } - - /* "aiohttp/_http_parser.pyx":630 - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) # <<<<<<<<<<<<<< - * except BaseException as ex: - * pyparser._last_error = ex - */ - __pyx_t_1 = __pyx_v_pyparser->_buf; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_6 = __pyx_f_7aiohttp_12_http_parser_extend(__pyx_t_1, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 630, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "aiohttp/_http_parser.pyx":626 - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * if length > pyparser._max_line_size: - * raise LineTooLong( - */ - } - - /* "aiohttp/_http_parser.pyx":635 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - - /* "aiohttp/_http_parser.pyx":631 - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_10) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_1, &__pyx_t_11) < 0) __PYX_ERR(0, 631, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_11); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_ex = __pyx_t_1; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":632 - * extend(pyparser._buf, at, length) - * except BaseException as 
ex: - * pyparser._last_error = ex # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_ex); - __Pyx_GIVEREF(__pyx_v_ex); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_ex; - - /* "aiohttp/_http_parser.pyx":633 - * except BaseException as ex: - * pyparser._last_error = ex - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - goto __pyx_L14_return; - } - - /* "aiohttp/_http_parser.pyx":631 - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - /*finally:*/ { - __pyx_L14_return: { - __pyx_t_10 = __pyx_r; - __Pyx_DECREF(__pyx_v_ex); - __pyx_v_ex = NULL; - __pyx_r = __pyx_t_10; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":626 - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * if length > pyparser._max_line_size: - * raise LineTooLong( - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":623 - * - * - * cdef int cb_on_url(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_ex); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":638 - * - * - * cdef int cb_on_status(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_status(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - PyObject *__pyx_v_ex = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - int __pyx_t_10; - PyObject *__pyx_t_11 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_status", 0); - - /* "aiohttp/_http_parser.pyx":640 - * cdef int cb_on_status(cparser.http_parser* parser, - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * cdef str reason - * try: - */ - __pyx_t_1 = ((PyObject 
*)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":642 - * cdef HttpParser pyparser = parser.data - * cdef str reason - * try: # <<<<<<<<<<<<<< - * if length > pyparser._max_line_size: - * raise LineTooLong( - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":643 - * cdef str reason - * try: - * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) - */ - __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_line_size) != 0); - if (unlikely(__pyx_t_5)) { - - /* "aiohttp/_http_parser.pyx":644 - * try: - * if length > pyparser._max_line_size: - * raise LineTooLong( # <<<<<<<<<<<<<< - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) - */ - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 644, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_6); - - /* "aiohttp/_http_parser.pyx":645 - * if length > pyparser._max_line_size: - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) # <<<<<<<<<<<<<< - * extend(pyparser._buf, at, length) - * except BaseException as ex: - */ - __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_line_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 645, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 645, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_9 = NULL; - __pyx_t_10 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_9)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_9); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_10 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_6)) { - PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 644, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { - PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7, __pyx_t_8}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 644, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - } else - #endif - { - __pyx_t_11 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 644, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_11); - if (__pyx_t_9) { - __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_9); __pyx_t_9 = NULL; - } - __Pyx_INCREF(__pyx_kp_u_Status_line_is_too_long); - __Pyx_GIVEREF(__pyx_kp_u_Status_line_is_too_long); - 
PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_kp_u_Status_line_is_too_long); - __Pyx_GIVEREF(__pyx_t_7); - PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, __pyx_t_7); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_11, 2+__pyx_t_10, __pyx_t_8); - __pyx_t_7 = 0; - __pyx_t_8 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_11, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 644, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - } - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 644, __pyx_L3_error) - - /* "aiohttp/_http_parser.pyx":643 - * cdef str reason - * try: - * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) - */ - } - - /* "aiohttp/_http_parser.pyx":646 - * raise LineTooLong( - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) # <<<<<<<<<<<<<< - * except BaseException as ex: - * pyparser._last_error = ex - */ - __pyx_t_1 = __pyx_v_pyparser->_buf; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_6 = __pyx_f_7aiohttp_12_http_parser_extend(__pyx_t_1, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 646, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "aiohttp/_http_parser.pyx":642 - * cdef HttpParser pyparser = parser.data - * cdef str reason - * try: # <<<<<<<<<<<<<< - * if length > pyparser._max_line_size: - * raise LineTooLong( - */ - } - - /* "aiohttp/_http_parser.pyx":651 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - - /* "aiohttp/_http_parser.pyx":647 - * 'Status line is too long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_10) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_1, &__pyx_t_11) < 0) __PYX_ERR(0, 647, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_11); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_ex = __pyx_t_1; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":648 - * extend(pyparser._buf, at, length) - * except BaseException as ex: - * pyparser._last_error = ex # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_ex); - __Pyx_GIVEREF(__pyx_v_ex); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_ex; - - /* "aiohttp/_http_parser.pyx":649 - * except BaseException as ex: - * pyparser._last_error = ex - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - goto __pyx_L14_return; - } - - /* "aiohttp/_http_parser.pyx":647 - * 'Status line is too 
long', pyparser._max_line_size, length) - * extend(pyparser._buf, at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - /*finally:*/ { - __pyx_L14_return: { - __pyx_t_10 = __pyx_r; - __Pyx_DECREF(__pyx_v_ex); - __pyx_v_ex = NULL; - __pyx_r = __pyx_t_10; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":642 - * cdef HttpParser pyparser = parser.data - * cdef str reason - * try: # <<<<<<<<<<<<<< - * if length > pyparser._max_line_size: - * raise LineTooLong( - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":638 - * - * - * cdef int cb_on_status(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_ex); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":654 - * - * - * cdef int cb_on_header_field(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_field(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - Py_ssize_t __pyx_v_size; - PyObject *__pyx_v_ex = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - Py_ssize_t __pyx_t_5; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - int __pyx_t_11; - PyObject *__pyx_t_12 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_header_field", 0); - - /* "aiohttp/_http_parser.pyx":656 - * cdef int cb_on_header_field(cparser.http_parser* parser, - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * try: - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":658 - * cdef HttpParser pyparser = parser.data - * cdef Py_ssize_t size - * try: # <<<<<<<<<<<<<< - * pyparser._on_status_complete() - * size = len(pyparser._raw_name) + length - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* 
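[Editorial aid, not part of the patch.] cb_on_url and cb_on_status (pyx lines 623-651), quoted in the comments above, share one shape: enforce _max_line_size, append the incoming bytes to the shared buffer, and translate any Python exception into a -1 return for the C parser while stashing it in _last_error for later re-raising. A plain-Python sketch of that pattern, with a local stand-in for aiohttp's LineTooLong:

    # Sketch of the cb_on_url / cb_on_status pattern quoted above (pyx 623-651).
    class LineTooLong(Exception):
        """Local stand-in for aiohttp's LineTooLong exception."""

    def cb_on_url(pyparser, at: bytes) -> int:
        try:
            if len(at) > pyparser._max_line_size:
                raise LineTooLong('Status line is too long',
                                  pyparser._max_line_size, len(at))
            pyparser._buf.extend(at)   # extend(pyparser._buf, at, length) in the C
        except BaseException as ex:
            # Python exceptions cannot propagate through the C parser, so the
            # callback records the error and signals failure with -1.
            pyparser._last_error = ex
            return -1
        else:
            return 0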
"aiohttp/_http_parser.pyx":659 - * cdef Py_ssize_t size - * try: - * pyparser._on_status_complete() # <<<<<<<<<<<<<< - * size = len(pyparser._raw_name) + length - * if size > pyparser._max_field_size: - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_status_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 659, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":660 - * try: - * pyparser._on_status_complete() - * size = len(pyparser._raw_name) + length # <<<<<<<<<<<<<< - * if size > pyparser._max_field_size: - * raise LineTooLong( - */ - __pyx_t_1 = __pyx_v_pyparser->_raw_name; - __Pyx_INCREF(__pyx_t_1); - if (unlikely(__pyx_t_1 == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 660, __pyx_L3_error) - } - __pyx_t_5 = PyByteArray_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 660, __pyx_L3_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_size = (__pyx_t_5 + __pyx_v_length); - - /* "aiohttp/_http_parser.pyx":661 - * pyparser._on_status_complete() - * size = len(pyparser._raw_name) + length - * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Header name is too long', pyparser._max_field_size, size) - */ - __pyx_t_6 = ((__pyx_v_size > __pyx_v_pyparser->_max_field_size) != 0); - if (unlikely(__pyx_t_6)) { - - /* "aiohttp/_http_parser.pyx":662 - * size = len(pyparser._raw_name) + length - * if size > pyparser._max_field_size: - * raise LineTooLong( # <<<<<<<<<<<<<< - * 'Header name is too long', pyparser._max_field_size, size) - * pyparser._on_header_field(at, length) - */ - __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 662, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_7); - - /* "aiohttp/_http_parser.pyx":663 - * if size > pyparser._max_field_size: - * raise LineTooLong( - * 'Header name is too long', pyparser._max_field_size, size) # <<<<<<<<<<<<<< - * pyparser._on_header_field(at, length) - * except BaseException as ex: - */ - __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 663, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_9 = PyInt_FromSsize_t(__pyx_v_size); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 663, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_9); - __pyx_t_10 = NULL; - __pyx_t_11 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_10)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_10); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - __pyx_t_11 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_7)) { - PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_name_is_too_long, __pyx_t_8, __pyx_t_9}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 662, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { - PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_name_is_too_long, __pyx_t_8, __pyx_t_9}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, 
__pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 662, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - } else - #endif - { - __pyx_t_12 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 662, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_12); - if (__pyx_t_10) { - __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __pyx_t_10 = NULL; - } - __Pyx_INCREF(__pyx_kp_u_Header_name_is_too_long); - __Pyx_GIVEREF(__pyx_kp_u_Header_name_is_too_long); - PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_11, __pyx_kp_u_Header_name_is_too_long); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_11, __pyx_t_8); - __Pyx_GIVEREF(__pyx_t_9); - PyTuple_SET_ITEM(__pyx_t_12, 2+__pyx_t_11, __pyx_t_9); - __pyx_t_8 = 0; - __pyx_t_9 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_12, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 662, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - } - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 662, __pyx_L3_error) - - /* "aiohttp/_http_parser.pyx":661 - * pyparser._on_status_complete() - * size = len(pyparser._raw_name) + length - * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Header name is too long', pyparser._max_field_size, size) - */ - } - - /* "aiohttp/_http_parser.pyx":664 - * raise LineTooLong( - * 'Header name is too long', pyparser._max_field_size, size) - * pyparser._on_header_field(at, length) # <<<<<<<<<<<<<< - * except BaseException as ex: - * pyparser._last_error = ex - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_field(__pyx_v_pyparser, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 664, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":658 - * cdef HttpParser pyparser = parser.data - * cdef Py_ssize_t size - * try: # <<<<<<<<<<<<<< - * pyparser._on_status_complete() - * size = len(pyparser._raw_name) + length - */ - } - - /* "aiohttp/_http_parser.pyx":669 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - - /* "aiohttp/_http_parser.pyx":665 - * 'Header name is too long', pyparser._max_field_size, size) - * pyparser._on_header_field(at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_11) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_7, &__pyx_t_12) < 0) __PYX_ERR(0, 665, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_GOTREF(__pyx_t_12); - __Pyx_INCREF(__pyx_t_7); - __pyx_v_ex = __pyx_t_7; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":666 - * pyparser._on_header_field(at, length) - * except 
BaseException as ex: - * pyparser._last_error = ex # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_ex); - __Pyx_GIVEREF(__pyx_v_ex); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_ex; - - /* "aiohttp/_http_parser.pyx":667 - * except BaseException as ex: - * pyparser._last_error = ex - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - goto __pyx_L14_return; - } - - /* "aiohttp/_http_parser.pyx":665 - * 'Header name is too long', pyparser._max_field_size, size) - * pyparser._on_header_field(at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - /*finally:*/ { - __pyx_L14_return: { - __pyx_t_11 = __pyx_r; - __Pyx_DECREF(__pyx_v_ex); - __pyx_v_ex = NULL; - __pyx_r = __pyx_t_11; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":658 - * cdef HttpParser pyparser = parser.data - * cdef Py_ssize_t size - * try: # <<<<<<<<<<<<<< - * pyparser._on_status_complete() - * size = len(pyparser._raw_name) + length - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":654 - * - * - * cdef int cb_on_header_field(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_12); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_ex); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":672 - * - * - * cdef int cb_on_header_value(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_value(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - Py_ssize_t __pyx_v_size; - PyObject *__pyx_v_ex = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - Py_ssize_t __pyx_t_5; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - int __pyx_t_11; - PyObject *__pyx_t_12 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_header_value", 0); - - /* "aiohttp/_http_parser.pyx":674 - * cdef int cb_on_header_value(cparser.http_parser* parser, - * const char *at, size_t length) except -1: - * cdef 
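[Editorial aid, not part of the patch.] cb_on_header_field (pyx lines 654-669), quoted above, applies the same guard to header names, but against the accumulated size (len(_raw_name) plus the new chunk) and _max_field_size; cb_on_header_value, which follows, is identical except that it measures _raw_value and reports 'Header value is too long'. A sketch, reusing the LineTooLong stand-in from the previous example:

    # Sketch of cb_on_header_field as quoted above (pyx 654-669).
    def cb_on_header_field(pyparser, at: bytes) -> int:
        try:
            # A new header name also terminates the status line.
            pyparser._on_status_complete()
            size = len(pyparser._raw_name) + len(at)
            if size > pyparser._max_field_size:
                raise LineTooLong('Header name is too long',
                                  pyparser._max_field_size, size)
            pyparser._on_header_field(at)
        except BaseException as ex:
            pyparser._last_error = ex
            return -1
        else:
            return 0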
HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * cdef Py_ssize_t size - * try: - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":676 - * cdef HttpParser pyparser = parser.data - * cdef Py_ssize_t size - * try: # <<<<<<<<<<<<<< - * size = len(pyparser._raw_value) + length - * if size > pyparser._max_field_size: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":677 - * cdef Py_ssize_t size - * try: - * size = len(pyparser._raw_value) + length # <<<<<<<<<<<<<< - * if size > pyparser._max_field_size: - * raise LineTooLong( - */ - __pyx_t_1 = __pyx_v_pyparser->_raw_value; - __Pyx_INCREF(__pyx_t_1); - if (unlikely(__pyx_t_1 == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 677, __pyx_L3_error) - } - __pyx_t_5 = PyByteArray_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 677, __pyx_L3_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_size = (__pyx_t_5 + __pyx_v_length); - - /* "aiohttp/_http_parser.pyx":678 - * try: - * size = len(pyparser._raw_value) + length - * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Header value is too long', pyparser._max_field_size, size) - */ - __pyx_t_6 = ((__pyx_v_size > __pyx_v_pyparser->_max_field_size) != 0); - if (unlikely(__pyx_t_6)) { - - /* "aiohttp/_http_parser.pyx":679 - * size = len(pyparser._raw_value) + length - * if size > pyparser._max_field_size: - * raise LineTooLong( # <<<<<<<<<<<<<< - * 'Header value is too long', pyparser._max_field_size, size) - * pyparser._on_header_value(at, length) - */ - __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 679, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_7); - - /* "aiohttp/_http_parser.pyx":680 - * if size > pyparser._max_field_size: - * raise LineTooLong( - * 'Header value is too long', pyparser._max_field_size, size) # <<<<<<<<<<<<<< - * pyparser._on_header_value(at, length) - * except BaseException as ex: - */ - __pyx_t_8 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 680, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_9 = PyInt_FromSsize_t(__pyx_v_size); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 680, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_9); - __pyx_t_10 = NULL; - __pyx_t_11 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_10)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_10); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - __pyx_t_11 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_7)) { - PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_value_is_too_long, __pyx_t_8, __pyx_t_9}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - } else - #endif - #if 
CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { - PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_kp_u_Header_value_is_too_long, __pyx_t_8, __pyx_t_9}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - } else - #endif - { - __pyx_t_12 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 679, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_12); - if (__pyx_t_10) { - __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __pyx_t_10 = NULL; - } - __Pyx_INCREF(__pyx_kp_u_Header_value_is_too_long); - __Pyx_GIVEREF(__pyx_kp_u_Header_value_is_too_long); - PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_11, __pyx_kp_u_Header_value_is_too_long); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_11, __pyx_t_8); - __Pyx_GIVEREF(__pyx_t_9); - PyTuple_SET_ITEM(__pyx_t_12, 2+__pyx_t_11, __pyx_t_9); - __pyx_t_8 = 0; - __pyx_t_9 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_12, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - } - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 679, __pyx_L3_error) - - /* "aiohttp/_http_parser.pyx":678 - * try: - * size = len(pyparser._raw_value) + length - * if size > pyparser._max_field_size: # <<<<<<<<<<<<<< - * raise LineTooLong( - * 'Header value is too long', pyparser._max_field_size, size) - */ - } - - /* "aiohttp/_http_parser.pyx":681 - * raise LineTooLong( - * 'Header value is too long', pyparser._max_field_size, size) - * pyparser._on_header_value(at, length) # <<<<<<<<<<<<<< - * except BaseException as ex: - * pyparser._last_error = ex - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_value(__pyx_v_pyparser, __pyx_v_at, __pyx_v_length); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 681, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":676 - * cdef HttpParser pyparser = parser.data - * cdef Py_ssize_t size - * try: # <<<<<<<<<<<<<< - * size = len(pyparser._raw_value) + length - * if size > pyparser._max_field_size: - */ - } - - /* "aiohttp/_http_parser.pyx":686 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - - /* "aiohttp/_http_parser.pyx":682 - * 'Header value is too long', pyparser._max_field_size, size) - * pyparser._on_header_value(at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_11) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_7, &__pyx_t_12) < 0) __PYX_ERR(0, 682, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - 
__Pyx_GOTREF(__pyx_t_7); - __Pyx_GOTREF(__pyx_t_12); - __Pyx_INCREF(__pyx_t_7); - __pyx_v_ex = __pyx_t_7; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":683 - * pyparser._on_header_value(at, length) - * except BaseException as ex: - * pyparser._last_error = ex # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_ex); - __Pyx_GIVEREF(__pyx_v_ex); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_ex; - - /* "aiohttp/_http_parser.pyx":684 - * except BaseException as ex: - * pyparser._last_error = ex - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - goto __pyx_L14_return; - } - - /* "aiohttp/_http_parser.pyx":682 - * 'Header value is too long', pyparser._max_field_size, size) - * pyparser._on_header_value(at, length) - * except BaseException as ex: # <<<<<<<<<<<<<< - * pyparser._last_error = ex - * return -1 - */ - /*finally:*/ { - __pyx_L14_return: { - __pyx_t_11 = __pyx_r; - __Pyx_DECREF(__pyx_v_ex); - __pyx_v_ex = NULL; - __pyx_r = __pyx_t_11; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":676 - * cdef HttpParser pyparser = parser.data - * cdef Py_ssize_t size - * try: # <<<<<<<<<<<<<< - * size = len(pyparser._raw_value) + length - * if size > pyparser._max_field_size: - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":672 - * - * - * cdef int cb_on_header_value(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_12); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_ex); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":689 - * - * - * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete(struct http_parser *__pyx_v_parser) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - PyObject *__pyx_v_exc = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_headers_complete", 0); - - /* "aiohttp/_http_parser.pyx":690 - * - * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: - * 
cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * try: - * pyparser._on_status_complete() - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":691 - * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_status_complete() - * pyparser._on_headers_complete() - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":692 - * cdef HttpParser pyparser = parser.data - * try: - * pyparser._on_status_complete() # <<<<<<<<<<<<<< - * pyparser._on_headers_complete() - * except BaseException as exc: - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_status_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 692, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":693 - * try: - * pyparser._on_status_complete() - * pyparser._on_headers_complete() # <<<<<<<<<<<<<< - * except BaseException as exc: - * pyparser._last_error = exc - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_headers_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 693, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":691 - * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_status_complete() - * pyparser._on_headers_complete() - */ - } - - /* "aiohttp/_http_parser.pyx":698 - * return -1 - * else: - * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< - * return 2 - * else: - */ - /*else:*/ { - __pyx_t_6 = (__pyx_v_pyparser->_cparser->upgrade != 0); - if (!__pyx_t_6) { - } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L10_bool_binop_done; - } - __pyx_t_6 = ((__pyx_v_pyparser->_cparser->method == 5) != 0); - __pyx_t_5 = __pyx_t_6; - __pyx_L10_bool_binop_done:; - if (__pyx_t_5) { - - /* "aiohttp/_http_parser.pyx":699 - * else: - * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT - * return 2 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = 2; - goto __pyx_L6_except_return; - - /* "aiohttp/_http_parser.pyx":698 - * return -1 - * else: - * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< - * return 2 - * else: - */ - } - - /* "aiohttp/_http_parser.pyx":701 - * return 2 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":694 - * pyparser._on_status_complete() - * pyparser._on_headers_complete() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_7) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); 
- if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_8, &__pyx_t_9) < 0) __PYX_ERR(0, 694, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_8); - __Pyx_GOTREF(__pyx_t_9); - __Pyx_INCREF(__pyx_t_8); - __pyx_v_exc = __pyx_t_8; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":695 - * pyparser._on_headers_complete() - * except BaseException as exc: - * pyparser._last_error = exc # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_exc); - __Pyx_GIVEREF(__pyx_v_exc); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_exc; - - /* "aiohttp/_http_parser.pyx":696 - * except BaseException as exc: - * pyparser._last_error = exc - * return -1 # <<<<<<<<<<<<<< - * else: - * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - goto __pyx_L16_return; - } - - /* "aiohttp/_http_parser.pyx":694 - * pyparser._on_status_complete() - * pyparser._on_headers_complete() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - /*finally:*/ { - __pyx_L16_return: { - __pyx_t_7 = __pyx_r; - __Pyx_DECREF(__pyx_v_exc); - __pyx_v_exc = NULL; - __pyx_r = __pyx_t_7; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":691 - * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_status_complete() - * pyparser._on_headers_complete() - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":689 - * - * - * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_exc); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":704 - * - * - * cdef int cb_on_body(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_body(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - PyObject *__pyx_v_body = 0; - PyObject *__pyx_v_exc = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - PyObject *__pyx_t_9 = NULL; - int __pyx_t_10; - int __pyx_t_11; - PyObject *__pyx_t_12 = 
NULL; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - PyObject *__pyx_t_15 = NULL; - int __pyx_t_16; - char const *__pyx_t_17; - PyObject *__pyx_t_18 = NULL; - PyObject *__pyx_t_19 = NULL; - PyObject *__pyx_t_20 = NULL; - PyObject *__pyx_t_21 = NULL; - PyObject *__pyx_t_22 = NULL; - PyObject *__pyx_t_23 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_body", 0); - - /* "aiohttp/_http_parser.pyx":706 - * cdef int cb_on_body(cparser.http_parser* parser, - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * cdef bytes body = at[:length] - * try: - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":707 - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - * cdef bytes body = at[:length] # <<<<<<<<<<<<<< - * try: - * pyparser._payload.feed_data(body, length) - */ - __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 707, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_body = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":708 - * cdef HttpParser pyparser = parser.data - * cdef bytes body = at[:length] - * try: # <<<<<<<<<<<<<< - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":709 - * cdef bytes body = at[:length] - * try: - * pyparser._payload.feed_data(body, length) # <<<<<<<<<<<<<< - * except BaseException as exc: - * if pyparser._payload_exception is not None: - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_feed_data); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 709, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 709, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = NULL; - __pyx_t_8 = 0; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_5); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_5, function); - __pyx_t_8 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_5)) { - PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_body, __pyx_t_6}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 709, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { - PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_body, __pyx_t_6}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 709, __pyx_L3_error) - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } else - #endif - { - __pyx_t_9 = 
PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 709, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_9); - if (__pyx_t_7) { - __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; - } - __Pyx_INCREF(__pyx_v_body); - __Pyx_GIVEREF(__pyx_v_body); - PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_v_body); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_6); - __pyx_t_6 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 709, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - } - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":708 - * cdef HttpParser pyparser = parser.data - * cdef bytes body = at[:length] - * try: # <<<<<<<<<<<<<< - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: - */ - } - - /* "aiohttp/_http_parser.pyx":718 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - - /* "aiohttp/_http_parser.pyx":710 - * try: - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: # <<<<<<<<<<<<<< - * if pyparser._payload_exception is not None: - * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - */ - __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_8) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_9) < 0) __PYX_ERR(0, 710, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_5); - __Pyx_GOTREF(__pyx_t_9); - __Pyx_INCREF(__pyx_t_5); - __pyx_v_exc = __pyx_t_5; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":711 - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: - * if pyparser._payload_exception is not None: # <<<<<<<<<<<<<< - * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - * else: - */ - __pyx_t_10 = (__pyx_v_pyparser->_payload_exception != Py_None); - __pyx_t_11 = (__pyx_t_10 != 0); - if (__pyx_t_11) { - - /* "aiohttp/_http_parser.pyx":712 - * except BaseException as exc: - * if pyparser._payload_exception is not None: - * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) # <<<<<<<<<<<<<< - * else: - * pyparser._payload.set_exception(exc) - */ - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_set_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 712, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_13 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), __pyx_v_exc); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 712, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_INCREF(__pyx_v_pyparser->_payload_exception); - __pyx_t_14 = __pyx_v_pyparser->_payload_exception; __pyx_t_15 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_14))) { - __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_14); - if (likely(__pyx_t_15)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); - __Pyx_INCREF(__pyx_t_15); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_14, function); - } - } - __pyx_t_12 = (__pyx_t_15) 
? __Pyx_PyObject_Call2Args(__pyx_t_14, __pyx_t_15, __pyx_t_13) : __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_t_13); - __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 712, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_12); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __pyx_t_14 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_14)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_14); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - } - } - __pyx_t_6 = (__pyx_t_14) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_14, __pyx_t_12) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_12); - __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 712, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "aiohttp/_http_parser.pyx":711 - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: - * if pyparser._payload_exception is not None: # <<<<<<<<<<<<<< - * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - * else: - */ - goto __pyx_L16; - } - - /* "aiohttp/_http_parser.pyx":714 - * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - * else: - * pyparser._payload.set_exception(exc) # <<<<<<<<<<<<<< - * pyparser._payload_error = 1 - * return -1 - */ - /*else*/ { - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_set_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 714, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_12 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_12 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_12)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_12); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - } - } - __pyx_t_6 = (__pyx_t_12) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_12, __pyx_v_exc) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_exc); - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 714, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __pyx_L16:; - - /* "aiohttp/_http_parser.pyx":715 - * else: - * pyparser._payload.set_exception(exc) - * pyparser._payload_error = 1 # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __pyx_v_pyparser->_payload_error = 1; - - /* "aiohttp/_http_parser.pyx":716 - * pyparser._payload.set_exception(exc) - * pyparser._payload_error = 1 - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - goto __pyx_L13_return; - } - - /* "aiohttp/_http_parser.pyx":710 - * try: - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: # <<<<<<<<<<<<<< - * if pyparser._payload_exception is not None: - * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - */ - /*finally:*/ { - __pyx_L14_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_18, &__pyx_t_19, &__pyx_t_20) < 0)) __Pyx_ErrFetch(&__pyx_t_18, &__pyx_t_19, &__pyx_t_20); - __Pyx_XGOTREF(__pyx_t_18); - __Pyx_XGOTREF(__pyx_t_19); - __Pyx_XGOTREF(__pyx_t_20); - __Pyx_XGOTREF(__pyx_t_21); - __Pyx_XGOTREF(__pyx_t_22); - __Pyx_XGOTREF(__pyx_t_23); - __pyx_t_8 = __pyx_lineno; __pyx_t_16 = __pyx_clineno; __pyx_t_17 = __pyx_filename; - { - __Pyx_DECREF(__pyx_v_exc); - __pyx_v_exc = NULL; - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_21); - __Pyx_XGIVEREF(__pyx_t_22); - __Pyx_XGIVEREF(__pyx_t_23); - __Pyx_ExceptionReset(__pyx_t_21, __pyx_t_22, __pyx_t_23); - } - __Pyx_XGIVEREF(__pyx_t_18); - __Pyx_XGIVEREF(__pyx_t_19); - __Pyx_XGIVEREF(__pyx_t_20); - __Pyx_ErrRestore(__pyx_t_18, __pyx_t_19, __pyx_t_20); - __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; - __pyx_lineno = __pyx_t_8; __pyx_clineno = __pyx_t_16; __pyx_filename = __pyx_t_17; - goto __pyx_L5_except_error; - } - __pyx_L13_return: { - __pyx_t_16 = __pyx_r; - __Pyx_DECREF(__pyx_v_exc); - __pyx_v_exc = NULL; - __pyx_r = __pyx_t_16; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":708 - * cdef HttpParser pyparser = parser.data - * cdef bytes body = at[:length] - * try: # <<<<<<<<<<<<<< - * pyparser._payload.feed_data(body, length) - * except BaseException as exc: - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - 
goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":704 - * - * - * cdef int cb_on_body(cparser.http_parser* parser, # <<<<<<<<<<<<<< - * const char *at, size_t length) except -1: - * cdef HttpParser pyparser = parser.data - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_12); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_XDECREF(__pyx_t_15); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_body); - __Pyx_XDECREF(__pyx_v_exc); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":721 - * - * - * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete(struct http_parser *__pyx_v_parser) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - PyObject *__pyx_v_exc = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_message_complete", 0); - - /* "aiohttp/_http_parser.pyx":722 - * - * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * try: - * pyparser._started = False - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":723 - * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._started = False - * pyparser._on_message_complete() - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":724 - * cdef HttpParser pyparser = parser.data - * try: - * pyparser._started = False # <<<<<<<<<<<<<< - * pyparser._on_message_complete() - * except BaseException as exc: - */ - __pyx_v_pyparser->_started = 0; - - /* "aiohttp/_http_parser.pyx":725 - * try: - * pyparser._started = False - * pyparser._on_message_complete() # <<<<<<<<<<<<<< - * except BaseException as exc: - * pyparser._last_error = exc - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_message_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 725, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":723 - * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._started = False - * pyparser._on_message_complete() - */ - } - - /* "aiohttp/_http_parser.pyx":730 - * return -1 - * else: - * return 0 
# <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":726 - * pyparser._started = False - * pyparser._on_message_complete() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_5) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 726, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_INCREF(__pyx_t_6); - __pyx_v_exc = __pyx_t_6; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":727 - * pyparser._on_message_complete() - * except BaseException as exc: - * pyparser._last_error = exc # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_exc); - __Pyx_GIVEREF(__pyx_v_exc); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_exc; - - /* "aiohttp/_http_parser.pyx":728 - * except BaseException as exc: - * pyparser._last_error = exc - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - goto __pyx_L13_return; - } - - /* "aiohttp/_http_parser.pyx":726 - * pyparser._started = False - * pyparser._on_message_complete() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - /*finally:*/ { - __pyx_L13_return: { - __pyx_t_5 = __pyx_r; - __Pyx_DECREF(__pyx_v_exc); - __pyx_v_exc = NULL; - __pyx_r = __pyx_t_5; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":723 - * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._started = False - * pyparser._on_message_complete() - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":721 - * - * - * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_exc); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":733 - * - * - * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header(struct http_parser *__pyx_v_parser) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser 
*__pyx_v_pyparser = 0; - PyObject *__pyx_v_exc = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_chunk_header", 0); - - /* "aiohttp/_http_parser.pyx":734 - * - * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * try: - * pyparser._on_chunk_header() - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":735 - * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_chunk_header() - * except BaseException as exc: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":736 - * cdef HttpParser pyparser = parser.data - * try: - * pyparser._on_chunk_header() # <<<<<<<<<<<<<< - * except BaseException as exc: - * pyparser._last_error = exc - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_header(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 736, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":735 - * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_chunk_header() - * except BaseException as exc: - */ - } - - /* "aiohttp/_http_parser.pyx":741 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":737 - * try: - * pyparser._on_chunk_header() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_5) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 737, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_INCREF(__pyx_t_6); - __pyx_v_exc = __pyx_t_6; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":738 - * pyparser._on_chunk_header() - * except BaseException as exc: - * pyparser._last_error = exc # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_exc); - __Pyx_GIVEREF(__pyx_v_exc); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_exc; - - /* "aiohttp/_http_parser.pyx":739 - * except BaseException as exc: - * pyparser._last_error = exc - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - 
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - goto __pyx_L13_return; - } - - /* "aiohttp/_http_parser.pyx":737 - * try: - * pyparser._on_chunk_header() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - /*finally:*/ { - __pyx_L13_return: { - __pyx_t_5 = __pyx_r; - __Pyx_DECREF(__pyx_v_exc); - __pyx_v_exc = NULL; - __pyx_r = __pyx_t_5; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":735 - * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_chunk_header() - * except BaseException as exc: - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":733 - * - * - * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_exc); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":744 - * - * - * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - -static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(struct http_parser *__pyx_v_parser) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; - PyObject *__pyx_v_exc = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cb_on_chunk_complete", 0); - - /* "aiohttp/_http_parser.pyx":745 - * - * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< - * try: - * pyparser._on_chunk_complete() - */ - __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); - __Pyx_INCREF(__pyx_t_1); - __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":746 - * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_chunk_complete() - * except BaseException as exc: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_4); - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":747 - * cdef HttpParser pyparser = parser.data - * try: - * pyparser._on_chunk_complete() # <<<<<<<<<<<<<< - * except BaseException as exc: - * pyparser._last_error 
= exc - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 747, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":746 - * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_chunk_complete() - * except BaseException as exc: - */ - } - - /* "aiohttp/_http_parser.pyx":752 - * return -1 - * else: - * return 0 # <<<<<<<<<<<<<< - * - * - */ - /*else:*/ { - __pyx_r = 0; - goto __pyx_L6_except_return; - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":748 - * try: - * pyparser._on_chunk_complete() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); - if (__pyx_t_5) { - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 748, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_INCREF(__pyx_t_6); - __pyx_v_exc = __pyx_t_6; - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":749 - * pyparser._on_chunk_complete() - * except BaseException as exc: - * pyparser._last_error = exc # <<<<<<<<<<<<<< - * return -1 - * else: - */ - __Pyx_INCREF(__pyx_v_exc); - __Pyx_GIVEREF(__pyx_v_exc); - __Pyx_GOTREF(__pyx_v_pyparser->_last_error); - __Pyx_DECREF(__pyx_v_pyparser->_last_error); - __pyx_v_pyparser->_last_error = __pyx_v_exc; - - /* "aiohttp/_http_parser.pyx":750 - * except BaseException as exc: - * pyparser._last_error = exc - * return -1 # <<<<<<<<<<<<<< - * else: - * return 0 - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - goto __pyx_L13_return; - } - - /* "aiohttp/_http_parser.pyx":748 - * try: - * pyparser._on_chunk_complete() - * except BaseException as exc: # <<<<<<<<<<<<<< - * pyparser._last_error = exc - * return -1 - */ - /*finally:*/ { - __pyx_L13_return: { - __pyx_t_5 = __pyx_r; - __Pyx_DECREF(__pyx_v_exc); - __pyx_v_exc = NULL; - __pyx_r = __pyx_t_5; - goto __pyx_L6_except_return; - } - } - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "aiohttp/_http_parser.pyx":746 - * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: - * cdef HttpParser pyparser = parser.data - * try: # <<<<<<<<<<<<<< - * pyparser._on_chunk_complete() - * except BaseException as exc: - */ - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L1_error; - __pyx_L6_except_return:; - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_XGIVEREF(__pyx_t_4); - __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); - goto __pyx_L0; - } - - /* "aiohttp/_http_parser.pyx":744 - * - * - * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< - * cdef HttpParser pyparser = parser.data - * try: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_complete", 
__pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); - __Pyx_XDECREF(__pyx_v_exc); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":755 - * - * - * cdef parser_error_from_errno(cparser.http_errno errno): # <<<<<<<<<<<<<< - * cdef bytes desc = cparser.http_errno_description(errno) - * - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(enum http_errno __pyx_v_errno) { - PyObject *__pyx_v_desc = 0; - PyObject *__pyx_v_cls = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("parser_error_from_errno", 0); - - /* "aiohttp/_http_parser.pyx":756 - * - * cdef parser_error_from_errno(cparser.http_errno errno): - * cdef bytes desc = cparser.http_errno_description(errno) # <<<<<<<<<<<<<< - * - * if errno in (cparser.HPE_CB_message_begin, - */ - __pyx_t_1 = __Pyx_PyBytes_FromString(http_errno_description(__pyx_v_errno)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 756, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_desc = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":758 - * cdef bytes desc = cparser.http_errno_description(errno) - * - * if errno in (cparser.HPE_CB_message_begin, # <<<<<<<<<<<<<< - * cparser.HPE_CB_url, - * cparser.HPE_CB_header_field, - */ - switch (__pyx_v_errno) { - case HPE_CB_message_begin: - case HPE_CB_url: - - /* "aiohttp/_http_parser.pyx":759 - * - * if errno in (cparser.HPE_CB_message_begin, - * cparser.HPE_CB_url, # <<<<<<<<<<<<<< - * cparser.HPE_CB_header_field, - * cparser.HPE_CB_header_value, - */ - case HPE_CB_header_field: - - /* "aiohttp/_http_parser.pyx":760 - * if errno in (cparser.HPE_CB_message_begin, - * cparser.HPE_CB_url, - * cparser.HPE_CB_header_field, # <<<<<<<<<<<<<< - * cparser.HPE_CB_header_value, - * cparser.HPE_CB_headers_complete, - */ - case HPE_CB_header_value: - - /* "aiohttp/_http_parser.pyx":761 - * cparser.HPE_CB_url, - * cparser.HPE_CB_header_field, - * cparser.HPE_CB_header_value, # <<<<<<<<<<<<<< - * cparser.HPE_CB_headers_complete, - * cparser.HPE_CB_body, - */ - case HPE_CB_headers_complete: - - /* "aiohttp/_http_parser.pyx":762 - * cparser.HPE_CB_header_field, - * cparser.HPE_CB_header_value, - * cparser.HPE_CB_headers_complete, # <<<<<<<<<<<<<< - * cparser.HPE_CB_body, - * cparser.HPE_CB_message_complete, - */ - case HPE_CB_body: - - /* "aiohttp/_http_parser.pyx":763 - * cparser.HPE_CB_header_value, - * cparser.HPE_CB_headers_complete, - * cparser.HPE_CB_body, # <<<<<<<<<<<<<< - * cparser.HPE_CB_message_complete, - * cparser.HPE_CB_status, - */ - case HPE_CB_message_complete: - - /* "aiohttp/_http_parser.pyx":764 - * cparser.HPE_CB_headers_complete, - * cparser.HPE_CB_body, - * cparser.HPE_CB_message_complete, # <<<<<<<<<<<<<< - * cparser.HPE_CB_status, - * cparser.HPE_CB_chunk_header, - */ - case HPE_CB_status: - - /* "aiohttp/_http_parser.pyx":765 - * cparser.HPE_CB_body, - * cparser.HPE_CB_message_complete, - * cparser.HPE_CB_status, # <<<<<<<<<<<<<< - * cparser.HPE_CB_chunk_header, - * cparser.HPE_CB_chunk_complete): - */ - case HPE_CB_chunk_header: - - /* "aiohttp/_http_parser.pyx":766 - * cparser.HPE_CB_message_complete, - * cparser.HPE_CB_status, - * cparser.HPE_CB_chunk_header, # <<<<<<<<<<<<<< - * 
cparser.HPE_CB_chunk_complete): - * cls = BadHttpMessage - */ - case HPE_CB_chunk_complete: - - /* "aiohttp/_http_parser.pyx":768 - * cparser.HPE_CB_chunk_header, - * cparser.HPE_CB_chunk_complete): - * cls = BadHttpMessage # <<<<<<<<<<<<<< - * - * elif errno == cparser.HPE_INVALID_STATUS: - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 768, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_cls = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":758 - * cdef bytes desc = cparser.http_errno_description(errno) - * - * if errno in (cparser.HPE_CB_message_begin, # <<<<<<<<<<<<<< - * cparser.HPE_CB_url, - * cparser.HPE_CB_header_field, - */ - break; - case HPE_INVALID_STATUS: - - /* "aiohttp/_http_parser.pyx":771 - * - * elif errno == cparser.HPE_INVALID_STATUS: - * cls = BadStatusLine # <<<<<<<<<<<<<< - * - * elif errno == cparser.HPE_INVALID_METHOD: - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 771, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_cls = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":770 - * cls = BadHttpMessage - * - * elif errno == cparser.HPE_INVALID_STATUS: # <<<<<<<<<<<<<< - * cls = BadStatusLine - * - */ - break; - case HPE_INVALID_METHOD: - - /* "aiohttp/_http_parser.pyx":774 - * - * elif errno == cparser.HPE_INVALID_METHOD: - * cls = BadStatusLine # <<<<<<<<<<<<<< - * - * elif errno == cparser.HPE_INVALID_URL: - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 774, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_cls = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":773 - * cls = BadStatusLine - * - * elif errno == cparser.HPE_INVALID_METHOD: # <<<<<<<<<<<<<< - * cls = BadStatusLine - * - */ - break; - case HPE_INVALID_URL: - - /* "aiohttp/_http_parser.pyx":777 - * - * elif errno == cparser.HPE_INVALID_URL: - * cls = InvalidURLError # <<<<<<<<<<<<<< - * - * else: - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 777, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_cls = __pyx_t_1; - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":776 - * cls = BadStatusLine - * - * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<< - * cls = InvalidURLError - * - */ - break; - default: - - /* "aiohttp/_http_parser.pyx":780 - * - * else: - * cls = BadHttpMessage # <<<<<<<<<<<<<< - * - * return cls(desc.decode('latin-1')) - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 780, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_cls = __pyx_t_1; - __pyx_t_1 = 0; - break; - } - - /* "aiohttp/_http_parser.pyx":782 - * cls = BadHttpMessage - * - * return cls(desc.decode('latin-1')) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 782, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v_cls); - __pyx_t_3 = __pyx_v_cls; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_1 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_t_2) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 782, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_parser.pyx":755 - * - * - * cdef parser_error_from_errno(cparser.http_errno errno): # <<<<<<<<<<<<<< - * cdef bytes desc = cparser.http_errno_description(errno) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("aiohttp._http_parser.parser_error_from_errno", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_desc); - __Pyx_XDECREF(__pyx_v_cls); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":785 - * - * - * def parse_url(url): # <<<<<<<<<<<<<< - * cdef: - * Py_buffer py_buf - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url); /*proto*/ -static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_1parse_url = {"parse_url", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_1parse_url, METH_O, 0}; -static PyObject *__pyx_pw_7aiohttp_12_http_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("parse_url (wrapper)", 0); - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_parse_url(__pyx_self, ((PyObject *)__pyx_v_url)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url) { - Py_buffer __pyx_v_py_buf; - char *__pyx_v_buf_data; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - char const *__pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("parse_url", 0); - - /* "aiohttp/_http_parser.pyx":790 - * char* buf_data - * - * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< - * try: - * buf_data = py_buf.buf - */ - __pyx_t_1 = PyObject_GetBuffer(__pyx_v_url, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 790, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":791 - * - * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) - * try: # <<<<<<<<<<<<<< - * buf_data = py_buf.buf - * return _parse_url(buf_data, py_buf.len) - */ - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":792 - * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) - * try: - * buf_data = py_buf.buf # <<<<<<<<<<<<<< - * return _parse_url(buf_data, py_buf.len) - * finally: - */ - __pyx_v_buf_data = ((char *)__pyx_v_py_buf.buf); - - /* "aiohttp/_http_parser.pyx":793 - * try: - * buf_data = py_buf.buf - * return _parse_url(buf_data, py_buf.len) # <<<<<<<<<<<<<< - * finally: - * PyBuffer_Release(&py_buf) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __pyx_f_7aiohttp_12_http_parser__parse_url(__pyx_v_buf_data, __pyx_v_py_buf.len); 
if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 793, __pyx_L4_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L3_return; - } - - /* "aiohttp/_http_parser.pyx":795 - * return _parse_url(buf_data, py_buf.len) - * finally: - * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< - * - * - */ - /*finally:*/ { - __pyx_L4_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_8, &__pyx_t_9, &__pyx_t_10); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0)) __Pyx_ErrFetch(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_5); - __Pyx_XGOTREF(__pyx_t_6); - __Pyx_XGOTREF(__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_8); - __Pyx_XGOTREF(__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_10); - __pyx_t_1 = __pyx_lineno; __pyx_t_3 = __pyx_clineno; __pyx_t_4 = __pyx_filename; - { - PyBuffer_Release((&__pyx_v_py_buf)); - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_XGIVEREF(__pyx_t_10); - __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); - } - __Pyx_XGIVEREF(__pyx_t_5); - __Pyx_XGIVEREF(__pyx_t_6); - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_ErrRestore(__pyx_t_5, __pyx_t_6, __pyx_t_7); - __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; - __pyx_lineno = __pyx_t_1; __pyx_clineno = __pyx_t_3; __pyx_filename = __pyx_t_4; - goto __pyx_L1_error; - } - __pyx_L3_return: { - __pyx_t_10 = __pyx_r; - __pyx_r = 0; - PyBuffer_Release((&__pyx_v_py_buf)); - __pyx_r = __pyx_t_10; - __pyx_t_10 = 0; - goto __pyx_L0; - } - } - - /* "aiohttp/_http_parser.pyx":785 - * - * - * def parse_url(url): # <<<<<<<<<<<<<< - * cdef: - * Py_buffer py_buf - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("aiohttp._http_parser.parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_parser.pyx":798 - * - * - * cdef _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< - * cdef: - * cparser.http_parser_url* parsed - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser__parse_url(char *__pyx_v_buf_data, size_t __pyx_v_length) { - struct http_parser_url *__pyx_v_parsed; - int __pyx_v_res; - PyObject *__pyx_v_schema = 0; - PyObject *__pyx_v_host = 0; - PyObject *__pyx_v_port = 0; - PyObject *__pyx_v_path = 0; - PyObject *__pyx_v_query = 0; - PyObject *__pyx_v_fragment = 0; - PyObject *__pyx_v_user = 0; - PyObject *__pyx_v_password = 0; - PyObject *__pyx_v_userinfo = 0; - CYTHON_UNUSED PyObject *__pyx_v_result = 0; - int __pyx_v_off; - int __pyx_v_ln; - CYTHON_UNUSED PyObject *__pyx_v_sep = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - uint16_t __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *(*__pyx_t_8)(PyObject *); - PyObject *__pyx_t_9 = NULL; - int __pyx_t_10; - int __pyx_t_11; - char const *__pyx_t_12; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - PyObject *__pyx_t_15 = NULL; - PyObject *__pyx_t_16 = NULL; - PyObject *__pyx_t_17 = NULL; - PyObject *__pyx_t_18 = NULL; - int __pyx_lineno = 0; - const 
char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_parse_url", 0); - - /* "aiohttp/_http_parser.pyx":802 - * cparser.http_parser_url* parsed - * int res - * str schema = None # <<<<<<<<<<<<<< - * str host = None - * object port = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_schema = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":803 - * int res - * str schema = None - * str host = None # <<<<<<<<<<<<<< - * object port = None - * str path = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_host = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":804 - * str schema = None - * str host = None - * object port = None # <<<<<<<<<<<<<< - * str path = None - * str query = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_port = Py_None; - - /* "aiohttp/_http_parser.pyx":805 - * str host = None - * object port = None - * str path = None # <<<<<<<<<<<<<< - * str query = None - * str fragment = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_path = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":806 - * object port = None - * str path = None - * str query = None # <<<<<<<<<<<<<< - * str fragment = None - * str user = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_query = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":807 - * str path = None - * str query = None - * str fragment = None # <<<<<<<<<<<<<< - * str user = None - * str password = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_fragment = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":808 - * str query = None - * str fragment = None - * str user = None # <<<<<<<<<<<<<< - * str password = None - * str userinfo = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_user = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":809 - * str fragment = None - * str user = None - * str password = None # <<<<<<<<<<<<<< - * str userinfo = None - * object result = None - */ - __Pyx_INCREF(Py_None); - __pyx_v_password = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":810 - * str user = None - * str password = None - * str userinfo = None # <<<<<<<<<<<<<< - * object result = None - * int off - */ - __Pyx_INCREF(Py_None); - __pyx_v_userinfo = ((PyObject*)Py_None); - - /* "aiohttp/_http_parser.pyx":811 - * str password = None - * str userinfo = None - * object result = None # <<<<<<<<<<<<<< - * int off - * int ln - */ - __Pyx_INCREF(Py_None); - __pyx_v_result = Py_None; - - /* "aiohttp/_http_parser.pyx":815 - * int ln - * - * parsed = \ # <<<<<<<<<<<<<< - * PyMem_Malloc(sizeof(cparser.http_parser_url)) - * if parsed is NULL: - */ - __pyx_v_parsed = ((struct http_parser_url *)PyMem_Malloc((sizeof(struct http_parser_url)))); - - /* "aiohttp/_http_parser.pyx":817 - * parsed = \ - * PyMem_Malloc(sizeof(cparser.http_parser_url)) - * if parsed is NULL: # <<<<<<<<<<<<<< - * raise MemoryError() - * cparser.http_parser_url_init(parsed) - */ - __pyx_t_1 = ((__pyx_v_parsed == NULL) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_parser.pyx":818 - * PyMem_Malloc(sizeof(cparser.http_parser_url)) - * if parsed is NULL: - * raise MemoryError() # <<<<<<<<<<<<<< - * cparser.http_parser_url_init(parsed) - * try: - */ - PyErr_NoMemory(); __PYX_ERR(0, 818, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":817 - * parsed = \ - * PyMem_Malloc(sizeof(cparser.http_parser_url)) - * if parsed is NULL: # <<<<<<<<<<<<<< - * raise MemoryError() - * cparser.http_parser_url_init(parsed) - */ - } - - /* "aiohttp/_http_parser.pyx":819 - * if parsed is NULL: - * raise MemoryError() - * cparser.http_parser_url_init(parsed) 
# <<<<<<<<<<<<<< - * try: - * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) - */ - http_parser_url_init(__pyx_v_parsed); - - /* "aiohttp/_http_parser.pyx":820 - * raise MemoryError() - * cparser.http_parser_url_init(parsed) - * try: # <<<<<<<<<<<<<< - * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) - * - */ - /*try:*/ { - - /* "aiohttp/_http_parser.pyx":821 - * cparser.http_parser_url_init(parsed) - * try: - * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) # <<<<<<<<<<<<<< - * - * if res == 0: - */ - __pyx_v_res = http_parser_parse_url(__pyx_v_buf_data, __pyx_v_length, 0, __pyx_v_parsed); - - /* "aiohttp/_http_parser.pyx":823 - * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) - * - * if res == 0: # <<<<<<<<<<<<<< - * if parsed.field_set & (1 << cparser.UF_SCHEMA): - * off = parsed.field_data[cparser.UF_SCHEMA].off - */ - __pyx_t_1 = ((__pyx_v_res == 0) != 0); - if (likely(__pyx_t_1)) { - - /* "aiohttp/_http_parser.pyx":824 - * - * if res == 0: - * if parsed.field_set & (1 << cparser.UF_SCHEMA): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_SCHEMA].off - * ln = parsed.field_data[cparser.UF_SCHEMA].len - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_SCHEMA)) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":825 - * if res == 0: - * if parsed.field_set & (1 << cparser.UF_SCHEMA): - * off = parsed.field_data[cparser.UF_SCHEMA].off # <<<<<<<<<<<<<< - * ln = parsed.field_data[cparser.UF_SCHEMA].len - * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).off; - __pyx_v_off = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":826 - * if parsed.field_set & (1 << cparser.UF_SCHEMA): - * off = parsed.field_data[cparser.UF_SCHEMA].off - * ln = parsed.field_data[cparser.UF_SCHEMA].len # <<<<<<<<<<<<<< - * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).len; - __pyx_v_ln = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":827 - * off = parsed.field_data[cparser.UF_SCHEMA].off - * ln = parsed.field_data[cparser.UF_SCHEMA].len - * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * else: - * schema = '' - */ - __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 827, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_schema, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":824 - * - * if res == 0: - * if parsed.field_set & (1 << cparser.UF_SCHEMA): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_SCHEMA].off - * ln = parsed.field_data[cparser.UF_SCHEMA].len - */ - goto __pyx_L8; - } - - /* "aiohttp/_http_parser.pyx":829 - * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - * schema = '' # <<<<<<<<<<<<<< - * - * if parsed.field_set & (1 << cparser.UF_HOST): - */ - /*else*/ { - __Pyx_INCREF(__pyx_kp_u__4); - __Pyx_DECREF_SET(__pyx_v_schema, __pyx_kp_u__4); - } - __pyx_L8:; - - /* "aiohttp/_http_parser.pyx":831 - * schema = '' - * - * if parsed.field_set & (1 << cparser.UF_HOST): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_HOST].off - * ln = parsed.field_data[cparser.UF_HOST].len - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_HOST)) != 0); - if (__pyx_t_1) { - - /* 
"aiohttp/_http_parser.pyx":832 - * - * if parsed.field_set & (1 << cparser.UF_HOST): - * off = parsed.field_data[cparser.UF_HOST].off # <<<<<<<<<<<<<< - * ln = parsed.field_data[cparser.UF_HOST].len - * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).off; - __pyx_v_off = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":833 - * if parsed.field_set & (1 << cparser.UF_HOST): - * off = parsed.field_data[cparser.UF_HOST].off - * ln = parsed.field_data[cparser.UF_HOST].len # <<<<<<<<<<<<<< - * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).len; - __pyx_v_ln = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":834 - * off = parsed.field_data[cparser.UF_HOST].off - * ln = parsed.field_data[cparser.UF_HOST].len - * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * else: - * host = '' - */ - __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 834, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_host, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":831 - * schema = '' - * - * if parsed.field_set & (1 << cparser.UF_HOST): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_HOST].off - * ln = parsed.field_data[cparser.UF_HOST].len - */ - goto __pyx_L9; - } - - /* "aiohttp/_http_parser.pyx":836 - * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - * host = '' # <<<<<<<<<<<<<< - * - * if parsed.field_set & (1 << cparser.UF_PORT): - */ - /*else*/ { - __Pyx_INCREF(__pyx_kp_u__4); - __Pyx_DECREF_SET(__pyx_v_host, __pyx_kp_u__4); - } - __pyx_L9:; - - /* "aiohttp/_http_parser.pyx":838 - * host = '' - * - * if parsed.field_set & (1 << cparser.UF_PORT): # <<<<<<<<<<<<<< - * port = parsed.port - * - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_PORT)) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":839 - * - * if parsed.field_set & (1 << cparser.UF_PORT): - * port = parsed.port # <<<<<<<<<<<<<< - * - * if parsed.field_set & (1 << cparser.UF_PATH): - */ - __pyx_t_3 = __Pyx_PyInt_From_uint16_t(__pyx_v_parsed->port); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 839, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_port, __pyx_t_3); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":838 - * host = '' - * - * if parsed.field_set & (1 << cparser.UF_PORT): # <<<<<<<<<<<<<< - * port = parsed.port - * - */ - } - - /* "aiohttp/_http_parser.pyx":841 - * port = parsed.port - * - * if parsed.field_set & (1 << cparser.UF_PATH): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_PATH].off - * ln = parsed.field_data[cparser.UF_PATH].len - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_PATH)) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":842 - * - * if parsed.field_set & (1 << cparser.UF_PATH): - * off = parsed.field_data[cparser.UF_PATH].off # <<<<<<<<<<<<<< - * ln = parsed.field_data[cparser.UF_PATH].len - * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).off; - __pyx_v_off = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":843 - * if parsed.field_set & (1 << cparser.UF_PATH): - * off = parsed.field_data[cparser.UF_PATH].off - * ln = parsed.field_data[cparser.UF_PATH].len # 
<<<<<<<<<<<<<< - * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).len; - __pyx_v_ln = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":844 - * off = parsed.field_data[cparser.UF_PATH].off - * ln = parsed.field_data[cparser.UF_PATH].len - * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * else: - * path = '' - */ - __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_path, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":841 - * port = parsed.port - * - * if parsed.field_set & (1 << cparser.UF_PATH): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_PATH].off - * ln = parsed.field_data[cparser.UF_PATH].len - */ - goto __pyx_L11; - } - - /* "aiohttp/_http_parser.pyx":846 - * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - * path = '' # <<<<<<<<<<<<<< - * - * if parsed.field_set & (1 << cparser.UF_QUERY): - */ - /*else*/ { - __Pyx_INCREF(__pyx_kp_u__4); - __Pyx_DECREF_SET(__pyx_v_path, __pyx_kp_u__4); - } - __pyx_L11:; - - /* "aiohttp/_http_parser.pyx":848 - * path = '' - * - * if parsed.field_set & (1 << cparser.UF_QUERY): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_QUERY].off - * ln = parsed.field_data[cparser.UF_QUERY].len - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_QUERY)) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":849 - * - * if parsed.field_set & (1 << cparser.UF_QUERY): - * off = parsed.field_data[cparser.UF_QUERY].off # <<<<<<<<<<<<<< - * ln = parsed.field_data[cparser.UF_QUERY].len - * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).off; - __pyx_v_off = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":850 - * if parsed.field_set & (1 << cparser.UF_QUERY): - * off = parsed.field_data[cparser.UF_QUERY].off - * ln = parsed.field_data[cparser.UF_QUERY].len # <<<<<<<<<<<<<< - * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).len; - __pyx_v_ln = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":851 - * off = parsed.field_data[cparser.UF_QUERY].off - * ln = parsed.field_data[cparser.UF_QUERY].len - * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * else: - * query = '' - */ - __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 851, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_query, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":848 - * path = '' - * - * if parsed.field_set & (1 << cparser.UF_QUERY): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_QUERY].off - * ln = parsed.field_data[cparser.UF_QUERY].len - */ - goto __pyx_L12; - } - - /* "aiohttp/_http_parser.pyx":853 - * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - * query = '' # <<<<<<<<<<<<<< - * - * if parsed.field_set & (1 << cparser.UF_FRAGMENT): - */ - /*else*/ { - __Pyx_INCREF(__pyx_kp_u__4); - __Pyx_DECREF_SET(__pyx_v_query, __pyx_kp_u__4); - } - __pyx_L12:; - - /* 
"aiohttp/_http_parser.pyx":855 - * query = '' - * - * if parsed.field_set & (1 << cparser.UF_FRAGMENT): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_FRAGMENT].off - * ln = parsed.field_data[cparser.UF_FRAGMENT].len - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_FRAGMENT)) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":856 - * - * if parsed.field_set & (1 << cparser.UF_FRAGMENT): - * off = parsed.field_data[cparser.UF_FRAGMENT].off # <<<<<<<<<<<<<< - * ln = parsed.field_data[cparser.UF_FRAGMENT].len - * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).off; - __pyx_v_off = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":857 - * if parsed.field_set & (1 << cparser.UF_FRAGMENT): - * off = parsed.field_data[cparser.UF_FRAGMENT].off - * ln = parsed.field_data[cparser.UF_FRAGMENT].len # <<<<<<<<<<<<<< - * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).len; - __pyx_v_ln = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":858 - * off = parsed.field_data[cparser.UF_FRAGMENT].off - * ln = parsed.field_data[cparser.UF_FRAGMENT].len - * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * else: - * fragment = '' - */ - __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 858, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_fragment, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":855 - * query = '' - * - * if parsed.field_set & (1 << cparser.UF_FRAGMENT): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_FRAGMENT].off - * ln = parsed.field_data[cparser.UF_FRAGMENT].len - */ - goto __pyx_L13; - } - - /* "aiohttp/_http_parser.pyx":860 - * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * else: - * fragment = '' # <<<<<<<<<<<<<< - * - * if parsed.field_set & (1 << cparser.UF_USERINFO): - */ - /*else*/ { - __Pyx_INCREF(__pyx_kp_u__4); - __Pyx_DECREF_SET(__pyx_v_fragment, __pyx_kp_u__4); - } - __pyx_L13:; - - /* "aiohttp/_http_parser.pyx":862 - * fragment = '' - * - * if parsed.field_set & (1 << cparser.UF_USERINFO): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_USERINFO].off - * ln = parsed.field_data[cparser.UF_USERINFO].len - */ - __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_USERINFO)) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_parser.pyx":863 - * - * if parsed.field_set & (1 << cparser.UF_USERINFO): - * off = parsed.field_data[cparser.UF_USERINFO].off # <<<<<<<<<<<<<< - * ln = parsed.field_data[cparser.UF_USERINFO].len - * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).off; - __pyx_v_off = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":864 - * if parsed.field_set & (1 << cparser.UF_USERINFO): - * off = parsed.field_data[cparser.UF_USERINFO].off - * ln = parsed.field_data[cparser.UF_USERINFO].len # <<<<<<<<<<<<<< - * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * - */ - __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).len; - __pyx_v_ln = __pyx_t_2; - - /* "aiohttp/_http_parser.pyx":865 - * off = parsed.field_data[cparser.UF_USERINFO].off - * ln = parsed.field_data[cparser.UF_USERINFO].len - * userinfo = 
buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< - * - * user, sep, password = userinfo.partition(':') - */ - __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 865, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_userinfo, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "aiohttp/_http_parser.pyx":867 - * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - * - * user, sep, password = userinfo.partition(':') # <<<<<<<<<<<<<< - * - * return URL_build(scheme=schema, - */ - __pyx_t_3 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyUnicode_Type_partition, __pyx_v_userinfo, __pyx_kp_u__11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 867, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { - PyObject* sequence = __pyx_t_3; - Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); - if (unlikely(size != 3)) { - if (size > 3) __Pyx_RaiseTooManyValuesError(3); - else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); - __PYX_ERR(0, 867, __pyx_L5_error) - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - if (likely(PyTuple_CheckExact(sequence))) { - __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); - __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); - __pyx_t_6 = PyTuple_GET_ITEM(sequence, 2); - } else { - __pyx_t_4 = PyList_GET_ITEM(sequence, 0); - __pyx_t_5 = PyList_GET_ITEM(sequence, 1); - __pyx_t_6 = PyList_GET_ITEM(sequence, 2); - } - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(__pyx_t_6); - #else - __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 867, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 867, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 867, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_6); - #endif - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } else { - Py_ssize_t index = -1; - __pyx_t_7 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 867, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext; - index = 0; __pyx_t_4 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_4)) goto __pyx_L15_unpacking_failed; - __Pyx_GOTREF(__pyx_t_4); - index = 1; __pyx_t_5 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_5)) goto __pyx_L15_unpacking_failed; - __Pyx_GOTREF(__pyx_t_5); - index = 2; __pyx_t_6 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L15_unpacking_failed; - __Pyx_GOTREF(__pyx_t_6); - if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_7), 3) < 0) __PYX_ERR(0, 867, __pyx_L5_error) - __pyx_t_8 = NULL; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - goto __pyx_L16_unpacking_done; - __pyx_L15_unpacking_failed:; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __pyx_t_8 = NULL; - if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); - __PYX_ERR(0, 867, __pyx_L5_error) - __pyx_L16_unpacking_done:; - } - if (!(likely(PyUnicode_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 867, __pyx_L5_error) - if (!(likely(PyUnicode_CheckExact(__pyx_t_6))||((__pyx_t_6) == 
Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 867, __pyx_L5_error) - __Pyx_DECREF_SET(__pyx_v_user, ((PyObject*)__pyx_t_4)); - __pyx_t_4 = 0; - __pyx_v_sep = __pyx_t_5; - __pyx_t_5 = 0; - __Pyx_DECREF_SET(__pyx_v_password, ((PyObject*)__pyx_t_6)); - __pyx_t_6 = 0; - - /* "aiohttp/_http_parser.pyx":862 - * fragment = '' - * - * if parsed.field_set & (1 << cparser.UF_USERINFO): # <<<<<<<<<<<<<< - * off = parsed.field_data[cparser.UF_USERINFO].off - * ln = parsed.field_data[cparser.UF_USERINFO].len - */ - } - - /* "aiohttp/_http_parser.pyx":869 - * user, sep, password = userinfo.partition(':') - * - * return URL_build(scheme=schema, # <<<<<<<<<<<<<< - * user=user, password=password, host=host, port=port, - * path=path, query_string=query, fragment=fragment, encoded=True) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_PyDict_NewPresized(9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 869, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_scheme, __pyx_v_schema) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - - /* "aiohttp/_http_parser.pyx":870 - * - * return URL_build(scheme=schema, - * user=user, password=password, host=host, port=port, # <<<<<<<<<<<<<< - * path=path, query_string=query, fragment=fragment, encoded=True) - * else: - */ - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_user, __pyx_v_user) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_password, __pyx_v_password) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_host, __pyx_v_host) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_port, __pyx_v_port) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - - /* "aiohttp/_http_parser.pyx":871 - * return URL_build(scheme=schema, - * user=user, password=password, host=host, port=port, - * path=path, query_string=query, fragment=fragment, encoded=True) # <<<<<<<<<<<<<< - * else: - * raise InvalidURLError("invalid url {!r}".format(buf_data)) - */ - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_path, __pyx_v_path) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_query_string, __pyx_v_query) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_fragment, __pyx_v_fragment) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_encoded, Py_True) < 0) __PYX_ERR(0, 869, __pyx_L5_error) - - /* "aiohttp/_http_parser.pyx":869 - * user, sep, password = userinfo.partition(':') - * - * return URL_build(scheme=schema, # <<<<<<<<<<<<<< - * user=user, password=password, host=host, port=port, - * path=path, query_string=query, fragment=fragment, encoded=True) - */ - __pyx_t_6 = __Pyx_PyObject_Call(__pyx_v_7aiohttp_12_http_parser_URL_build, __pyx_empty_tuple, __pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 869, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L4_return; - - /* "aiohttp/_http_parser.pyx":823 - * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) - * - * if res == 0: # <<<<<<<<<<<<<< - * if parsed.field_set & (1 << cparser.UF_SCHEMA): - * off = parsed.field_data[cparser.UF_SCHEMA].off - */ - } - - /* "aiohttp/_http_parser.pyx":873 - * path=path, query_string=query, fragment=fragment, encoded=True) - * else: - * raise InvalidURLError("invalid url {!r}".format(buf_data)) # <<<<<<<<<<<<<< - * finally: - * PyMem_Free(parsed) - 
*/ - /*else*/ { - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 873, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_invalid_url_r, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 873, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_7 = __Pyx_PyBytes_FromString(__pyx_v_buf_data); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 873, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_9 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_9)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_9); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_5 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_7); - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 873, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_6 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 873, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __PYX_ERR(0, 873, __pyx_L5_error) - } - } - - /* "aiohttp/_http_parser.pyx":875 - * raise InvalidURLError("invalid url {!r}".format(buf_data)) - * finally: - * PyMem_Free(parsed) # <<<<<<<<<<<<<< - */ - /*finally:*/ { - __pyx_L5_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15) < 0)) __Pyx_ErrFetch(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); - __Pyx_XGOTREF(__pyx_t_13); - __Pyx_XGOTREF(__pyx_t_14); - __Pyx_XGOTREF(__pyx_t_15); - __Pyx_XGOTREF(__pyx_t_16); - __Pyx_XGOTREF(__pyx_t_17); - __Pyx_XGOTREF(__pyx_t_18); - __pyx_t_10 = __pyx_lineno; __pyx_t_11 = __pyx_clineno; __pyx_t_12 = __pyx_filename; - { - PyMem_Free(__pyx_v_parsed); - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_16); - __Pyx_XGIVEREF(__pyx_t_17); - __Pyx_XGIVEREF(__pyx_t_18); - __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); - } - __Pyx_XGIVEREF(__pyx_t_13); - __Pyx_XGIVEREF(__pyx_t_14); - __Pyx_XGIVEREF(__pyx_t_15); - __Pyx_ErrRestore(__pyx_t_13, __pyx_t_14, __pyx_t_15); - __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; 
__pyx_t_18 = 0; - __pyx_lineno = __pyx_t_10; __pyx_clineno = __pyx_t_11; __pyx_filename = __pyx_t_12; - goto __pyx_L1_error; - } - __pyx_L4_return: { - __pyx_t_18 = __pyx_r; - __pyx_r = 0; - PyMem_Free(__pyx_v_parsed); - __pyx_r = __pyx_t_18; - __pyx_t_18 = 0; - goto __pyx_L0; - } - } - - /* "aiohttp/_http_parser.pyx":798 - * - * - * cdef _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< - * cdef: - * cparser.http_parser_url* parsed - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("aiohttp._http_parser._parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_schema); - __Pyx_XDECREF(__pyx_v_host); - __Pyx_XDECREF(__pyx_v_port); - __Pyx_XDECREF(__pyx_v_path); - __Pyx_XDECREF(__pyx_v_query); - __Pyx_XDECREF(__pyx_v_fragment); - __Pyx_XDECREF(__pyx_v_user); - __Pyx_XDECREF(__pyx_v_password); - __Pyx_XDECREF(__pyx_v_userinfo); - __Pyx_XDECREF(__pyx_v_result); - __Pyx_XDECREF(__pyx_v_sep); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage = {"__pyx_unpickle_RawRequestMessage", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_RawRequestMessage (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - PyObject* values[3] = {0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawRequestMessage", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; - else { 
- __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawRequestMessage", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_RawRequestMessage") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawRequestMessage", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawRequestMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_2__pyx_unpickle_RawRequestMessage(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_2__pyx_unpickle_RawRequestMessage(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_RawRequestMessage", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x1408252: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) - */ - __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x1408252) != 0); - if (__pyx_t_1) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum != 0x1408252: - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) - * __pyx_result = RawRequestMessage.__new__(__pyx_type) - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) 
__PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_t_2); - __pyx_v___pyx_PickleError = __pyx_t_2; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum != 0x1408252: - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) # <<<<<<<<<<<<<< - * __pyx_result = RawRequestMessage.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x14, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_INCREF(__pyx_v___pyx_PickleError); - __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x1408252: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) - * __pyx_result = RawRequestMessage.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v___pyx_result = __pyx_t_3; - __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) - * __pyx_result = RawRequestMessage.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_1 = (__pyx_v___pyx_state != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "(tree fragment)":9 - * __pyx_result = RawRequestMessage.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x1408252 = (chunked, compression, headers, method, path, raw_headers, should_close, upgrade, url, version))" % __pyx_checksum) - * __pyx_result = RawRequestMessage.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawRequestMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - 
__Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawRequestMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_RawRequestMessage__set_state", 0); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[10]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->chunked); - __Pyx_DECREF(__pyx_v___pyx_result->chunked); - __pyx_v___pyx_result->chunked = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->compression); - __Pyx_DECREF(__pyx_v___pyx_result->compression); - __pyx_v___pyx_result->compression = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, 
__Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->headers); - __Pyx_DECREF(__pyx_v___pyx_result->headers); - __pyx_v___pyx_result->headers = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->method); - __Pyx_DECREF(__pyx_v___pyx_result->method); - __pyx_v___pyx_result->method = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->path); - __Pyx_DECREF(__pyx_v___pyx_result->path); - __pyx_v___pyx_result->path = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->raw_headers); - __Pyx_DECREF(__pyx_v___pyx_result->raw_headers); - __pyx_v___pyx_result->raw_headers = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->should_close); - __Pyx_DECREF(__pyx_v___pyx_result->should_close); - __pyx_v___pyx_result->should_close = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->upgrade); - __Pyx_DECREF(__pyx_v___pyx_result->upgrade); - __pyx_v___pyx_result->upgrade = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - 
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 8, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->url); - __Pyx_DECREF(__pyx_v___pyx_result->url); - __pyx_v___pyx_result->url = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 9, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->version); - __Pyx_DECREF(__pyx_v___pyx_result->version); - __pyx_v___pyx_result->version = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[10]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = ((__pyx_t_3 > 10) != 0); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_5 = (__pyx_t_4 != 0); - __pyx_t_2 = __pyx_t_5; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[10]) # <<<<<<<<<<<<<< - */ - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 10, long, 1, 
__Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_8 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_8)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_8); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - } - } - __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[10]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawRequestMessage__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_RawResponseMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage = {"__pyx_unpickle_RawResponseMessage", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v___pyx_type = 0; - long 
__pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_RawResponseMessage (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - PyObject* values[3] = {0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawResponseMessage", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawResponseMessage", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_RawResponseMessage") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_RawResponseMessage", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawResponseMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_12_http_parser_4__pyx_unpickle_RawResponseMessage(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_parser_4__pyx_unpickle_RawResponseMessage(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - 
__Pyx_RefNannySetupContext("__pyx_unpickle_RawResponseMessage", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0xc7706dc: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) - */ - __pyx_t_1 = ((__pyx_v___pyx_checksum != 0xc7706dc) != 0); - if (__pyx_t_1) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum != 0xc7706dc: - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) - * __pyx_result = RawResponseMessage.__new__(__pyx_type) - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_t_2); - __pyx_v___pyx_PickleError = __pyx_t_2; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum != 0xc7706dc: - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) # <<<<<<<<<<<<<< - * __pyx_result = RawResponseMessage.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0xc7, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_INCREF(__pyx_v___pyx_PickleError); - __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0xc7706dc: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) - * __pyx_result = RawResponseMessage.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v___pyx_result = __pyx_t_3; - __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) - * __pyx_result = RawResponseMessage.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_1 = (__pyx_v___pyx_state != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "(tree fragment)":9 - * __pyx_result = RawResponseMessage.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_3 = __pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xc7706dc = (chunked, code, compression, headers, raw_headers, reason, should_close, upgrade, version))" % __pyx_checksum) - * __pyx_result = RawResponseMessage.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_RawResponseMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawResponseMessage", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); 
- __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] - * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_7aiohttp_12_http_parser___pyx_unpickle_RawResponseMessage__set_state(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_t_3; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_RawResponseMessage__set_state", 0); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[9]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->chunked); - __Pyx_DECREF(__pyx_v___pyx_result->chunked); - __pyx_v___pyx_result->chunked = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v___pyx_result->code = __pyx_t_2; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->compression); - __Pyx_DECREF(__pyx_v___pyx_result->compression); - __pyx_v___pyx_result->compression = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->headers); - __Pyx_DECREF(__pyx_v___pyx_result->headers); - __pyx_v___pyx_result->headers = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->raw_headers); - __Pyx_DECREF(__pyx_v___pyx_result->raw_headers); - __pyx_v___pyx_result->raw_headers = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->reason); - __Pyx_DECREF(__pyx_v___pyx_result->reason); - __pyx_v___pyx_result->reason = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->should_close); - __Pyx_DECREF(__pyx_v___pyx_result->should_close); - __pyx_v___pyx_result->should_close = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->upgrade); - __Pyx_DECREF(__pyx_v___pyx_result->upgrade); - __pyx_v___pyx_result->upgrade = __pyx_t_1; - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 8, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - 
__Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->version); - __Pyx_DECREF(__pyx_v___pyx_result->version); - __pyx_v___pyx_result->version = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] - * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[9]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_5 = ((__pyx_t_4 > 9) != 0); - if (__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_6 = (__pyx_t_5 != 0); - __pyx_t_3 = __pyx_t_6; - __pyx_L4_bool_binop_done:; - if (__pyx_t_3) { - - /* "(tree fragment)":14 - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] - * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[9]) # <<<<<<<<<<<<<< - */ - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 9, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_9 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { - __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); - if (likely(__pyx_t_9)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); - __Pyx_INCREF(__pyx_t_9); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_8, function); - } - } - __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] - * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[9]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_RawResponseMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_RawResponseMessage__set_state(RawResponseMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.code = __pyx_state[1]; __pyx_result.compression = __pyx_state[2]; __pyx_result.headers = __pyx_state[3]; __pyx_result.raw_headers = __pyx_state[4]; __pyx_result.reason = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.version = __pyx_state[8] - * if len(__pyx_state) > 9 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("aiohttp._http_parser.__pyx_unpickle_RawResponseMessage__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *__pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[250]; -static int __pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage = 0; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p; - PyObject *o; - if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)) & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { - o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[--__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage]; - memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)); - (void) PyObject_INIT(o, t); - PyObject_GC_Track(o); - } else { - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - } - p = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o); - p->method = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->path = ((PyObject*)Py_None); Py_INCREF(Py_None); - 
p->version = Py_None; Py_INCREF(Py_None); - p->headers = Py_None; Py_INCREF(Py_None); - p->raw_headers = Py_None; Py_INCREF(Py_None); - p->should_close = Py_None; Py_INCREF(Py_None); - p->compression = Py_None; Py_INCREF(Py_None); - p->upgrade = Py_None; Py_INCREF(Py_None); - p->chunked = Py_None; Py_INCREF(Py_None); - p->url = Py_None; Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser_RawRequestMessage(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->method); - Py_CLEAR(p->path); - Py_CLEAR(p->version); - Py_CLEAR(p->headers); - Py_CLEAR(p->raw_headers); - Py_CLEAR(p->should_close); - Py_CLEAR(p->compression); - Py_CLEAR(p->upgrade); - Py_CLEAR(p->chunked); - Py_CLEAR(p->url); - if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage < 250) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage)) & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { - __pyx_freelist_7aiohttp_12_http_parser_RawRequestMessage[__pyx_freecount_7aiohttp_12_http_parser_RawRequestMessage++] = ((struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o); - } else { - (*Py_TYPE(o)->tp_free)(o); - } -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser_RawRequestMessage(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o; - if (p->version) { - e = (*v)(p->version, a); if (e) return e; - } - if (p->headers) { - e = (*v)(p->headers, a); if (e) return e; - } - if (p->raw_headers) { - e = (*v)(p->raw_headers, a); if (e) return e; - } - if (p->should_close) { - e = (*v)(p->should_close, a); if (e) return e; - } - if (p->compression) { - e = (*v)(p->compression, a); if (e) return e; - } - if (p->upgrade) { - e = (*v)(p->upgrade, a); if (e) return e; - } - if (p->chunked) { - e = (*v)(p->chunked, a); if (e) return e; - } - if (p->url) { - e = (*v)(p->url, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7aiohttp_12_http_parser_RawRequestMessage(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage *)o; - tmp = ((PyObject*)p->version); - p->version = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->headers); - p->headers = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->raw_headers); - p->raw_headers = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->should_close); - p->should_close = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->compression); - p->compression = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->upgrade); - p->upgrade = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->chunked); - p->chunked = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->url); - p->url = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyObject 
*__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_method(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_6method_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_path(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_4path_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_version(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7version_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_headers(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7headers_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_raw_headers(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11raw_headers_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_should_close(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_12should_close_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_compression(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_11compression_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_upgrade(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7upgrade_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_chunked(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7chunked_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_url(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3url_1__get__(o); -} - -static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_RawRequestMessage[] = { - {"_replace", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_5_replace, METH_VARARGS|METH_KEYWORDS, 0}, - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_7__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_9__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static struct PyGetSetDef __pyx_getsets_7aiohttp_12_http_parser_RawRequestMessage[] = { - {(char *)"method", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_method, 0, (char *)0, 0}, - {(char *)"path", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_path, 0, (char *)0, 0}, - {(char *)"version", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_version, 0, (char *)0, 0}, - {(char *)"headers", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_headers, 0, (char *)0, 0}, - {(char *)"raw_headers", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_raw_headers, 0, (char *)0, 0}, - {(char *)"should_close", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_should_close, 0, (char *)0, 0}, - {(char *)"compression", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_compression, 0, (char *)0, 0}, - {(char *)"upgrade", 
__pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_upgrade, 0, (char *)0, 0}, - {(char *)"chunked", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_chunked, 0, (char *)0, 0}, - {(char *)"url", __pyx_getprop_7aiohttp_12_http_parser_17RawRequestMessage_url, 0, (char *)0, 0}, - {0, 0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser_RawRequestMessage = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.RawRequestMessage", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawRequestMessage), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser_RawRequestMessage, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_3__repr__, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser_RawRequestMessage, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser_RawRequestMessage, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_12_http_parser_RawRequestMessage, /*tp_methods*/ - 0, /*tp_members*/ - __pyx_getsets_7aiohttp_12_http_parser_RawRequestMessage, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_7aiohttp_12_http_parser_17RawRequestMessage_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser_RawRequestMessage, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *__pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[250]; -static int __pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage = 0; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p; - PyObject *o; - if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)) & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { - o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[--__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage]; - memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)); - (void) PyObject_INIT(o, t); - PyObject_GC_Track(o); - } else { - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } 
else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - } - p = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o); - p->version = Py_None; Py_INCREF(Py_None); - p->reason = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->headers = Py_None; Py_INCREF(Py_None); - p->raw_headers = Py_None; Py_INCREF(Py_None); - p->should_close = Py_None; Py_INCREF(Py_None); - p->compression = Py_None; Py_INCREF(Py_None); - p->upgrade = Py_None; Py_INCREF(Py_None); - p->chunked = Py_None; Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser_RawResponseMessage(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->version); - Py_CLEAR(p->reason); - Py_CLEAR(p->headers); - Py_CLEAR(p->raw_headers); - Py_CLEAR(p->should_close); - Py_CLEAR(p->compression); - Py_CLEAR(p->upgrade); - Py_CLEAR(p->chunked); - if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage < 250) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage)) & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { - __pyx_freelist_7aiohttp_12_http_parser_RawResponseMessage[__pyx_freecount_7aiohttp_12_http_parser_RawResponseMessage++] = ((struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o); - } else { - (*Py_TYPE(o)->tp_free)(o); - } -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser_RawResponseMessage(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o; - if (p->version) { - e = (*v)(p->version, a); if (e) return e; - } - if (p->headers) { - e = (*v)(p->headers, a); if (e) return e; - } - if (p->raw_headers) { - e = (*v)(p->raw_headers, a); if (e) return e; - } - if (p->should_close) { - e = (*v)(p->should_close, a); if (e) return e; - } - if (p->compression) { - e = (*v)(p->compression, a); if (e) return e; - } - if (p->upgrade) { - e = (*v)(p->upgrade, a); if (e) return e; - } - if (p->chunked) { - e = (*v)(p->chunked, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7aiohttp_12_http_parser_RawResponseMessage(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *p = (struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage *)o; - tmp = ((PyObject*)p->version); - p->version = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->headers); - p->headers = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->raw_headers); - p->raw_headers = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->should_close); - p->should_close = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->compression); - p->compression = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->upgrade); - p->upgrade = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->chunked); - p->chunked = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyObject 
*__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_version(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7version_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_code(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_4code_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_reason(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_6reason_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_headers(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7headers_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_raw_headers(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11raw_headers_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_should_close(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_12should_close_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_compression(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_11compression_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_upgrade(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7upgrade_1__get__(o); -} - -static PyObject *__pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_chunked(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7chunked_1__get__(o); -} - -static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_RawResponseMessage[] = { - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_5__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_7__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static struct PyGetSetDef __pyx_getsets_7aiohttp_12_http_parser_RawResponseMessage[] = { - {(char *)"version", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_version, 0, (char *)0, 0}, - {(char *)"code", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_code, 0, (char *)0, 0}, - {(char *)"reason", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_reason, 0, (char *)0, 0}, - {(char *)"headers", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_headers, 0, (char *)0, 0}, - {(char *)"raw_headers", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_raw_headers, 0, (char *)0, 0}, - {(char *)"should_close", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_should_close, 0, (char *)0, 0}, - {(char *)"compression", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_compression, 0, (char *)0, 0}, - {(char *)"upgrade", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_upgrade, 0, (char *)0, 0}, - {(char *)"chunked", __pyx_getprop_7aiohttp_12_http_parser_18RawResponseMessage_chunked, 0, (char *)0, 0}, - {0, 0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser_RawResponseMessage = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.RawResponseMessage", /*tp_name*/ - 
sizeof(struct __pyx_obj_7aiohttp_12_http_parser_RawResponseMessage), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser_RawResponseMessage, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_3__repr__, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser_RawResponseMessage, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser_RawResponseMessage, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_12_http_parser_RawResponseMessage, /*tp_methods*/ - 0, /*tp_members*/ - __pyx_getsets_7aiohttp_12_http_parser_RawResponseMessage, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_7aiohttp_12_http_parser_18RawResponseMessage_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser_RawResponseMessage, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; -static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_vtable_7aiohttp_12_http_parser_HttpParser; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o); - p->__pyx_vtab = __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; - p->_raw_name = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_raw_value = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_protocol = Py_None; Py_INCREF(Py_None); - p->_loop = Py_None; Py_INCREF(Py_None); - p->_timer = Py_None; Py_INCREF(Py_None); - p->_url = Py_None; Py_INCREF(Py_None); - p->_buf = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_path = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_reason = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_headers = Py_None; Py_INCREF(Py_None); - p->_raw_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_messages = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_payload = Py_None; Py_INCREF(Py_None); - p->_payload_exception = Py_None; Py_INCREF(Py_None); - p->_last_error = Py_None; Py_INCREF(Py_None); - p->_content_encoding = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->py_buf.obj = NULL; - if 
(unlikely(__pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; - return o; - bad: - Py_DECREF(o); o = 0; - return NULL; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - { - PyObject *etype, *eval, *etb; - PyErr_Fetch(&etype, &eval, &etb); - __Pyx_SET_REFCNT(o, Py_REFCNT(o) + 1); - __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(o); - __Pyx_SET_REFCNT(o, Py_REFCNT(o) - 1); - PyErr_Restore(etype, eval, etb); - } - Py_CLEAR(p->_raw_name); - Py_CLEAR(p->_raw_value); - Py_CLEAR(p->_protocol); - Py_CLEAR(p->_loop); - Py_CLEAR(p->_timer); - Py_CLEAR(p->_url); - Py_CLEAR(p->_buf); - Py_CLEAR(p->_path); - Py_CLEAR(p->_reason); - Py_CLEAR(p->_headers); - Py_CLEAR(p->_raw_headers); - Py_CLEAR(p->_messages); - Py_CLEAR(p->_payload); - Py_CLEAR(p->_payload_exception); - Py_CLEAR(p->_last_error); - Py_CLEAR(p->_content_encoding); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; - if (p->_protocol) { - e = (*v)(p->_protocol, a); if (e) return e; - } - if (p->_loop) { - e = (*v)(p->_loop, a); if (e) return e; - } - if (p->_timer) { - e = (*v)(p->_timer, a); if (e) return e; - } - if (p->_url) { - e = (*v)(p->_url, a); if (e) return e; - } - if (p->_headers) { - e = (*v)(p->_headers, a); if (e) return e; - } - if (p->_raw_headers) { - e = (*v)(p->_raw_headers, a); if (e) return e; - } - if (p->_messages) { - e = (*v)(p->_messages, a); if (e) return e; - } - if (p->_payload) { - e = (*v)(p->_payload, a); if (e) return e; - } - if (p->_payload_exception) { - e = (*v)(p->_payload_exception, a); if (e) return e; - } - if (p->_last_error) { - e = (*v)(p->_last_error, a); if (e) return e; - } - if (p->py_buf.obj) { - e = (*v)(p->py_buf.obj, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; - tmp = ((PyObject*)p->_protocol); - p->_protocol = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_loop); - p->_loop = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_timer); - p->_timer = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_url); - p->_url = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_headers); - p->_headers = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_raw_headers); - p->_raw_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_messages); - p->_messages = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_payload); - p->_payload = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_payload_exception); - p->_payload_exception = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_last_error); - p->_last_error = 
Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - Py_CLEAR(p->py_buf.obj); - return 0; -} - -static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpParser[] = { - {"feed_eof", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5feed_eof, METH_NOARGS, 0}, - {"feed_data", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_data, METH_O, 0}, - {"set_upgraded", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9set_upgraded, METH_O, 0}, - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpParser = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.HttpParser", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_12_http_parser_HttpParser, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser_HttpParser, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; -static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParser __pyx_vtable_7aiohttp_12_http_parser_HttpRequestParser; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParser(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *p; - PyObject *o = __pyx_tp_new_7aiohttp_12_http_parser_HttpParser(t, a, k); - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser *)o); - p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser*)__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParser; - return o; -} - -static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpRequestParser[] = { - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_3__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", 
(PyCFunction)__pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_5__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpRequestParser = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.HttpRequestParser", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParser), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_12_http_parser_HttpRequestParser, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_7aiohttp_12_http_parser_17HttpRequestParser_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParser, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; -static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParser __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParser; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParser(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *p; - PyObject *o = __pyx_tp_new_7aiohttp_12_http_parser_HttpParser(t, a, k); - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser *)o); - p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser*)__pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParser; - return o; -} - -static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpResponseParser[] = { - {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_3__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_5__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpResponseParser = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.HttpResponseParser", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParser), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - 
#if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_7aiohttp_12_http_parser_HttpResponseParser, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_7aiohttp_12_http_parser_18HttpResponseParser_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParser, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct____repr__[8]; -static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__ = 0; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - PyObject *o; - if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__ > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__)))) { - o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct____repr__[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__]; - memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__)); - (void) PyObject_INIT(o, t); - PyObject_GC_Track(o); - } else { - o = (*t->tp_alloc)(t, 0); - if (unlikely(!o)) return 0; - } - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o; - PyObject_GC_UnTrack(o); - Py_CLEAR(p->__pyx_v_info); - if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__ < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__)))) { - __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct____repr__[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct____repr__++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o); - } else { - (*Py_TYPE(o)->tp_free)(o); - } -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyObject *o, visitproc v, void 
*a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o; - if (p->__pyx_v_info) { - e = (*v)(p->__pyx_v_info, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct____repr__(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__ *)o; - tmp = ((PyObject*)p->__pyx_v_info); - p->__pyx_v_info = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct____repr__ = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.__pyx_scope_struct____repr__", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct____repr__), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - 0, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct____repr__, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[8]; -static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = 0; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - PyObject *o; - if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)))) { - o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr]; - memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)); - (void) PyObject_INIT(o, t); - 
PyObject_GC_Track(o); - } else { - o = (*t->tp_alloc)(t, 0); - if (unlikely(!o)) return 0; - } - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o; - PyObject_GC_UnTrack(o); - Py_CLEAR(p->__pyx_outer_scope); - Py_CLEAR(p->__pyx_v_name); - Py_CLEAR(p->__pyx_v_val); - if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr)))) { - __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o); - } else { - (*Py_TYPE(o)->tp_free)(o); - } -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr *)o; - if (p->__pyx_outer_scope) { - e = (*v)(((PyObject *)p->__pyx_outer_scope), a); if (e) return e; - } - if (p->__pyx_v_name) { - e = (*v)(p->__pyx_v_name, a); if (e) return e; - } - if (p->__pyx_v_val) { - e = (*v)(p->__pyx_v_val, a); if (e) return e; - } - return 0; -} - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.__pyx_scope_struct_1_genexpr", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - 0, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_1_genexpr, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ 
*__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__[8]; -static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ = 0; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - PyObject *o; - if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__)))) { - o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__]; - memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__)); - (void) PyObject_INIT(o, t); - PyObject_GC_Track(o); - } else { - o = (*t->tp_alloc)(t, 0); - if (unlikely(!o)) return 0; - } - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o; - PyObject_GC_UnTrack(o); - Py_CLEAR(p->__pyx_v_info); - if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__)))) { - __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o); - } else { - (*Py_TYPE(o)->tp_free)(o); - } -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o; - if (p->__pyx_v_info) { - e = (*v)(p->__pyx_v_info, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ *)o; - tmp = ((PyObject*)p->__pyx_v_info); - p->__pyx_v_info = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__ = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.__pyx_scope_struct_2___repr__", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - 
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_traverse*/ - __pyx_tp_clear_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - 0, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_2___repr__, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr[8]; -static int __pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr = 0; - -static PyObject *__pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - PyObject *o; - if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr)))) { - o = (PyObject*)__pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr[--__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr]; - memset(o, 0, sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr)); - (void) PyObject_INIT(o, t); - PyObject_GC_Track(o); - } else { - o = (*t->tp_alloc)(t, 0); - if (unlikely(!o)) return 0; - } - return o; -} - -static void __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(PyObject *o) { - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)o; - PyObject_GC_UnTrack(o); - Py_CLEAR(p->__pyx_outer_scope); - Py_CLEAR(p->__pyx_v_name); - Py_CLEAR(p->__pyx_v_val); - if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr)))) { - __pyx_freelist_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr[__pyx_freecount_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr++] = ((struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)o); - } else { - (*Py_TYPE(o)->tp_free)(o); - } -} - -static int __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *p = (struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr *)o; - if (p->__pyx_outer_scope) { - e = (*v)(((PyObject *)p->__pyx_outer_scope), a); if (e) return e; - } - if (p->__pyx_v_name) { - e = (*v)(p->__pyx_v_name, a); if (e) return e; - } - if (p->__pyx_v_val) { - e = (*v)(p->__pyx_v_val, a); if (e) return e; - } - return 0; -} - -static 
PyTypeObject __pyx_type_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr = { - PyVarObject_HEAD_INIT(0, 0) - "aiohttp._http_parser.__pyx_scope_struct_3_genexpr", /*tp_name*/ - sizeof(struct __pyx_obj_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - 0, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_7aiohttp_12_http_parser___pyx_scope_struct_3_genexpr, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__http_parser(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__http_parser}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "_http_parser", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, - {&__pyx_n_s_ACCEPT, __pyx_k_ACCEPT, sizeof(__pyx_k_ACCEPT), 0, 0, 1, 1}, - {&__pyx_n_s_ACCEPT_CHARSET, __pyx_k_ACCEPT_CHARSET, sizeof(__pyx_k_ACCEPT_CHARSET), 0, 0, 1, 1}, - {&__pyx_n_s_ACCEPT_ENCODING, __pyx_k_ACCEPT_ENCODING, sizeof(__pyx_k_ACCEPT_ENCODING), 0, 0, 1, 1}, - {&__pyx_n_s_ACCEPT_LANGUAGE, __pyx_k_ACCEPT_LANGUAGE, sizeof(__pyx_k_ACCEPT_LANGUAGE), 0, 0, 1, 1}, - 
{&__pyx_n_s_ACCEPT_RANGES, __pyx_k_ACCEPT_RANGES, sizeof(__pyx_k_ACCEPT_RANGES), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_ALLOW_CREDENTIALS, __pyx_k_ACCESS_CONTROL_ALLOW_CREDENTIALS, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_CREDENTIALS), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_ALLOW_HEADERS, __pyx_k_ACCESS_CONTROL_ALLOW_HEADERS, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_HEADERS), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_ALLOW_METHODS, __pyx_k_ACCESS_CONTROL_ALLOW_METHODS, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_METHODS), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_ALLOW_ORIGIN, __pyx_k_ACCESS_CONTROL_ALLOW_ORIGIN, sizeof(__pyx_k_ACCESS_CONTROL_ALLOW_ORIGIN), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_EXPOSE_HEADERS, __pyx_k_ACCESS_CONTROL_EXPOSE_HEADERS, sizeof(__pyx_k_ACCESS_CONTROL_EXPOSE_HEADERS), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_MAX_AGE, __pyx_k_ACCESS_CONTROL_MAX_AGE, sizeof(__pyx_k_ACCESS_CONTROL_MAX_AGE), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_REQUEST_HEADERS, __pyx_k_ACCESS_CONTROL_REQUEST_HEADERS, sizeof(__pyx_k_ACCESS_CONTROL_REQUEST_HEADERS), 0, 0, 1, 1}, - {&__pyx_n_s_ACCESS_CONTROL_REQUEST_METHOD, __pyx_k_ACCESS_CONTROL_REQUEST_METHOD, sizeof(__pyx_k_ACCESS_CONTROL_REQUEST_METHOD), 0, 0, 1, 1}, - {&__pyx_n_s_AGE, __pyx_k_AGE, sizeof(__pyx_k_AGE), 0, 0, 1, 1}, - {&__pyx_n_s_ALLOW, __pyx_k_ALLOW, sizeof(__pyx_k_ALLOW), 0, 0, 1, 1}, - {&__pyx_n_s_AUTHORIZATION, __pyx_k_AUTHORIZATION, sizeof(__pyx_k_AUTHORIZATION), 0, 0, 1, 1}, - {&__pyx_n_s_BadHttpMessage, __pyx_k_BadHttpMessage, sizeof(__pyx_k_BadHttpMessage), 0, 0, 1, 1}, - {&__pyx_n_s_BadStatusLine, __pyx_k_BadStatusLine, sizeof(__pyx_k_BadStatusLine), 0, 0, 1, 1}, - {&__pyx_n_s_BaseException, __pyx_k_BaseException, sizeof(__pyx_k_BaseException), 0, 0, 1, 1}, - {&__pyx_n_s_CACHE_CONTROL, __pyx_k_CACHE_CONTROL, sizeof(__pyx_k_CACHE_CONTROL), 0, 0, 1, 1}, - {&__pyx_n_s_CIMultiDict, __pyx_k_CIMultiDict, sizeof(__pyx_k_CIMultiDict), 0, 0, 1, 1}, - {&__pyx_n_s_CIMultiDictProxy, __pyx_k_CIMultiDictProxy, sizeof(__pyx_k_CIMultiDictProxy), 0, 0, 1, 1}, - {&__pyx_n_s_CIMultiDictProxy_2, __pyx_k_CIMultiDictProxy_2, sizeof(__pyx_k_CIMultiDictProxy_2), 0, 0, 1, 1}, - {&__pyx_n_s_CIMultiDict_2, __pyx_k_CIMultiDict_2, sizeof(__pyx_k_CIMultiDict_2), 0, 0, 1, 1}, - {&__pyx_n_s_CONNECTION, __pyx_k_CONNECTION, sizeof(__pyx_k_CONNECTION), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_DISPOSITION, __pyx_k_CONTENT_DISPOSITION, sizeof(__pyx_k_CONTENT_DISPOSITION), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_ENCODING, __pyx_k_CONTENT_ENCODING, sizeof(__pyx_k_CONTENT_ENCODING), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_LANGUAGE, __pyx_k_CONTENT_LANGUAGE, sizeof(__pyx_k_CONTENT_LANGUAGE), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_LENGTH, __pyx_k_CONTENT_LENGTH, sizeof(__pyx_k_CONTENT_LENGTH), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_LOCATION, __pyx_k_CONTENT_LOCATION, sizeof(__pyx_k_CONTENT_LOCATION), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_MD5, __pyx_k_CONTENT_MD5, sizeof(__pyx_k_CONTENT_MD5), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_RANGE, __pyx_k_CONTENT_RANGE, sizeof(__pyx_k_CONTENT_RANGE), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_TRANSFER_ENCODING, __pyx_k_CONTENT_TRANSFER_ENCODING, sizeof(__pyx_k_CONTENT_TRANSFER_ENCODING), 0, 0, 1, 1}, - {&__pyx_n_s_CONTENT_TYPE, __pyx_k_CONTENT_TYPE, sizeof(__pyx_k_CONTENT_TYPE), 0, 0, 1, 1}, - {&__pyx_n_s_COOKIE, __pyx_k_COOKIE, sizeof(__pyx_k_COOKIE), 0, 0, 1, 1}, - {&__pyx_n_s_ContentLengthError, __pyx_k_ContentLengthError, sizeof(__pyx_k_ContentLengthError), 0, 0, 1, 1}, - {&__pyx_n_s_DATE, __pyx_k_DATE, sizeof(__pyx_k_DATE), 0, 0, 1, 1}, - 
{&__pyx_n_s_DESTINATION, __pyx_k_DESTINATION, sizeof(__pyx_k_DESTINATION), 0, 0, 1, 1}, - {&__pyx_n_s_DIGEST, __pyx_k_DIGEST, sizeof(__pyx_k_DIGEST), 0, 0, 1, 1}, - {&__pyx_n_s_DeflateBuffer, __pyx_k_DeflateBuffer, sizeof(__pyx_k_DeflateBuffer), 0, 0, 1, 1}, - {&__pyx_n_s_DeflateBuffer_2, __pyx_k_DeflateBuffer_2, sizeof(__pyx_k_DeflateBuffer_2), 0, 0, 1, 1}, - {&__pyx_n_s_EMPTY_PAYLOAD, __pyx_k_EMPTY_PAYLOAD, sizeof(__pyx_k_EMPTY_PAYLOAD), 0, 0, 1, 1}, - {&__pyx_n_s_EMPTY_PAYLOAD_2, __pyx_k_EMPTY_PAYLOAD_2, sizeof(__pyx_k_EMPTY_PAYLOAD_2), 0, 0, 1, 1}, - {&__pyx_n_s_ETAG, __pyx_k_ETAG, sizeof(__pyx_k_ETAG), 0, 0, 1, 1}, - {&__pyx_n_s_EXPECT, __pyx_k_EXPECT, sizeof(__pyx_k_EXPECT), 0, 0, 1, 1}, - {&__pyx_n_s_EXPIRES, __pyx_k_EXPIRES, sizeof(__pyx_k_EXPIRES), 0, 0, 1, 1}, - {&__pyx_n_s_FORWARDED, __pyx_k_FORWARDED, sizeof(__pyx_k_FORWARDED), 0, 0, 1, 1}, - {&__pyx_n_s_FROM, __pyx_k_FROM, sizeof(__pyx_k_FROM), 0, 0, 1, 1}, - {&__pyx_n_s_HOST, __pyx_k_HOST, sizeof(__pyx_k_HOST), 0, 0, 1, 1}, - {&__pyx_kp_u_Header_name_is_too_long, __pyx_k_Header_name_is_too_long, sizeof(__pyx_k_Header_name_is_too_long), 0, 1, 0, 0}, - {&__pyx_kp_u_Header_value_is_too_long, __pyx_k_Header_value_is_too_long, sizeof(__pyx_k_Header_value_is_too_long), 0, 1, 0, 0}, - {&__pyx_n_s_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 0, 1, 1}, - {&__pyx_n_u_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 1, 0, 1}, - {&__pyx_n_s_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 0, 1, 1}, - {&__pyx_n_u_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 1, 0, 1}, - {&__pyx_n_s_HttpVersion, __pyx_k_HttpVersion, sizeof(__pyx_k_HttpVersion), 0, 0, 1, 1}, - {&__pyx_n_s_HttpVersion10, __pyx_k_HttpVersion10, sizeof(__pyx_k_HttpVersion10), 0, 0, 1, 1}, - {&__pyx_n_s_HttpVersion10_2, __pyx_k_HttpVersion10_2, sizeof(__pyx_k_HttpVersion10_2), 0, 0, 1, 1}, - {&__pyx_n_s_HttpVersion11, __pyx_k_HttpVersion11, sizeof(__pyx_k_HttpVersion11), 0, 0, 1, 1}, - {&__pyx_n_s_HttpVersion11_2, __pyx_k_HttpVersion11_2, sizeof(__pyx_k_HttpVersion11_2), 0, 0, 1, 1}, - {&__pyx_n_s_HttpVersion_2, __pyx_k_HttpVersion_2, sizeof(__pyx_k_HttpVersion_2), 0, 0, 1, 1}, - {&__pyx_n_s_IF_MATCH, __pyx_k_IF_MATCH, sizeof(__pyx_k_IF_MATCH), 0, 0, 1, 1}, - {&__pyx_n_s_IF_MODIFIED_SINCE, __pyx_k_IF_MODIFIED_SINCE, sizeof(__pyx_k_IF_MODIFIED_SINCE), 0, 0, 1, 1}, - {&__pyx_n_s_IF_NONE_MATCH, __pyx_k_IF_NONE_MATCH, sizeof(__pyx_k_IF_NONE_MATCH), 0, 0, 1, 1}, - {&__pyx_n_s_IF_RANGE, __pyx_k_IF_RANGE, sizeof(__pyx_k_IF_RANGE), 0, 0, 1, 1}, - {&__pyx_n_s_IF_UNMODIFIED_SINCE, __pyx_k_IF_UNMODIFIED_SINCE, sizeof(__pyx_k_IF_UNMODIFIED_SINCE), 0, 0, 1, 1}, - {&__pyx_kp_s_Incompatible_checksums_s_vs_0x14, __pyx_k_Incompatible_checksums_s_vs_0x14, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x14), 0, 0, 1, 0}, - {&__pyx_kp_s_Incompatible_checksums_s_vs_0xc7, __pyx_k_Incompatible_checksums_s_vs_0xc7, sizeof(__pyx_k_Incompatible_checksums_s_vs_0xc7), 0, 0, 1, 0}, - {&__pyx_n_s_InvalidHeader, __pyx_k_InvalidHeader, sizeof(__pyx_k_InvalidHeader), 0, 0, 1, 1}, - {&__pyx_n_s_InvalidURLError, __pyx_k_InvalidURLError, sizeof(__pyx_k_InvalidURLError), 0, 0, 1, 1}, - {&__pyx_n_s_KEEP_ALIVE, __pyx_k_KEEP_ALIVE, sizeof(__pyx_k_KEEP_ALIVE), 0, 0, 1, 1}, - {&__pyx_n_s_LAST_EVENT_ID, __pyx_k_LAST_EVENT_ID, sizeof(__pyx_k_LAST_EVENT_ID), 0, 0, 1, 1}, - {&__pyx_n_s_LAST_MODIFIED, __pyx_k_LAST_MODIFIED, sizeof(__pyx_k_LAST_MODIFIED), 0, 0, 1, 1}, - 
{&__pyx_n_s_LINK, __pyx_k_LINK, sizeof(__pyx_k_LINK), 0, 0, 1, 1}, - {&__pyx_n_s_LOCATION, __pyx_k_LOCATION, sizeof(__pyx_k_LOCATION), 0, 0, 1, 1}, - {&__pyx_n_s_LineTooLong, __pyx_k_LineTooLong, sizeof(__pyx_k_LineTooLong), 0, 0, 1, 1}, - {&__pyx_n_s_MAX_FORWARDS, __pyx_k_MAX_FORWARDS, sizeof(__pyx_k_MAX_FORWARDS), 0, 0, 1, 1}, - {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1}, - {&__pyx_kp_u_Not_enough_data_for_satisfy_cont, __pyx_k_Not_enough_data_for_satisfy_cont, sizeof(__pyx_k_Not_enough_data_for_satisfy_cont), 0, 1, 0, 0}, - {&__pyx_kp_u_Not_enough_data_for_satisfy_tran, __pyx_k_Not_enough_data_for_satisfy_tran, sizeof(__pyx_k_Not_enough_data_for_satisfy_tran), 0, 1, 0, 0}, - {&__pyx_n_s_ORIGIN, __pyx_k_ORIGIN, sizeof(__pyx_k_ORIGIN), 0, 0, 1, 1}, - {&__pyx_n_s_PRAGMA, __pyx_k_PRAGMA, sizeof(__pyx_k_PRAGMA), 0, 0, 1, 1}, - {&__pyx_n_s_PROXY_AUTHENTICATE, __pyx_k_PROXY_AUTHENTICATE, sizeof(__pyx_k_PROXY_AUTHENTICATE), 0, 0, 1, 1}, - {&__pyx_n_s_PROXY_AUTHORIZATION, __pyx_k_PROXY_AUTHORIZATION, sizeof(__pyx_k_PROXY_AUTHORIZATION), 0, 0, 1, 1}, - {&__pyx_n_s_PayloadEncodingError, __pyx_k_PayloadEncodingError, sizeof(__pyx_k_PayloadEncodingError), 0, 0, 1, 1}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_RANGE, __pyx_k_RANGE, sizeof(__pyx_k_RANGE), 0, 0, 1, 1}, - {&__pyx_n_s_REFERER, __pyx_k_REFERER, sizeof(__pyx_k_REFERER), 0, 0, 1, 1}, - {&__pyx_n_s_RETRY_AFTER, __pyx_k_RETRY_AFTER, sizeof(__pyx_k_RETRY_AFTER), 0, 0, 1, 1}, - {&__pyx_kp_u_RawRequestMessage, __pyx_k_RawRequestMessage, sizeof(__pyx_k_RawRequestMessage), 0, 1, 0, 0}, - {&__pyx_n_s_RawRequestMessage_2, __pyx_k_RawRequestMessage_2, sizeof(__pyx_k_RawRequestMessage_2), 0, 0, 1, 1}, - {&__pyx_n_u_RawRequestMessage_2, __pyx_k_RawRequestMessage_2, sizeof(__pyx_k_RawRequestMessage_2), 0, 1, 0, 1}, - {&__pyx_kp_u_RawResponseMessage, __pyx_k_RawResponseMessage, sizeof(__pyx_k_RawResponseMessage), 0, 1, 0, 0}, - {&__pyx_n_s_RawResponseMessage_2, __pyx_k_RawResponseMessage_2, sizeof(__pyx_k_RawResponseMessage_2), 0, 0, 1, 1}, - {&__pyx_n_u_RawResponseMessage_2, __pyx_k_RawResponseMessage_2, sizeof(__pyx_k_RawResponseMessage_2), 0, 1, 0, 1}, - {&__pyx_n_s_SEC_WEBSOCKET_ACCEPT, __pyx_k_SEC_WEBSOCKET_ACCEPT, sizeof(__pyx_k_SEC_WEBSOCKET_ACCEPT), 0, 0, 1, 1}, - {&__pyx_n_s_SEC_WEBSOCKET_EXTENSIONS, __pyx_k_SEC_WEBSOCKET_EXTENSIONS, sizeof(__pyx_k_SEC_WEBSOCKET_EXTENSIONS), 0, 0, 1, 1}, - {&__pyx_n_s_SEC_WEBSOCKET_KEY, __pyx_k_SEC_WEBSOCKET_KEY, sizeof(__pyx_k_SEC_WEBSOCKET_KEY), 0, 0, 1, 1}, - {&__pyx_n_s_SEC_WEBSOCKET_KEY1, __pyx_k_SEC_WEBSOCKET_KEY1, sizeof(__pyx_k_SEC_WEBSOCKET_KEY1), 0, 0, 1, 1}, - {&__pyx_n_s_SEC_WEBSOCKET_PROTOCOL, __pyx_k_SEC_WEBSOCKET_PROTOCOL, sizeof(__pyx_k_SEC_WEBSOCKET_PROTOCOL), 0, 0, 1, 1}, - {&__pyx_n_s_SEC_WEBSOCKET_VERSION, __pyx_k_SEC_WEBSOCKET_VERSION, sizeof(__pyx_k_SEC_WEBSOCKET_VERSION), 0, 0, 1, 1}, - {&__pyx_n_s_SERVER, __pyx_k_SERVER, sizeof(__pyx_k_SERVER), 0, 0, 1, 1}, - {&__pyx_n_s_SET_COOKIE, __pyx_k_SET_COOKIE, sizeof(__pyx_k_SET_COOKIE), 0, 0, 1, 1}, - {&__pyx_kp_u_Status_line_is_too_long, __pyx_k_Status_line_is_too_long, sizeof(__pyx_k_Status_line_is_too_long), 0, 1, 0, 0}, - {&__pyx_n_s_StreamReader, __pyx_k_StreamReader, sizeof(__pyx_k_StreamReader), 0, 0, 1, 1}, - {&__pyx_n_s_StreamReader_2, __pyx_k_StreamReader_2, sizeof(__pyx_k_StreamReader_2), 0, 0, 1, 1}, - {&__pyx_n_s_TE, __pyx_k_TE, sizeof(__pyx_k_TE), 0, 0, 1, 1}, - {&__pyx_n_s_TRAILER, __pyx_k_TRAILER, sizeof(__pyx_k_TRAILER), 0, 
0, 1, 1}, - {&__pyx_n_s_TRANSFER_ENCODING, __pyx_k_TRANSFER_ENCODING, sizeof(__pyx_k_TRANSFER_ENCODING), 0, 0, 1, 1}, - {&__pyx_n_s_TransferEncodingError, __pyx_k_TransferEncodingError, sizeof(__pyx_k_TransferEncodingError), 0, 0, 1, 1}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_UPGRADE, __pyx_k_UPGRADE, sizeof(__pyx_k_UPGRADE), 0, 0, 1, 1}, - {&__pyx_n_s_URI, __pyx_k_URI, sizeof(__pyx_k_URI), 0, 0, 1, 1}, - {&__pyx_n_s_URL, __pyx_k_URL, sizeof(__pyx_k_URL), 0, 0, 1, 1}, - {&__pyx_n_s_URL_2, __pyx_k_URL_2, sizeof(__pyx_k_URL_2), 0, 0, 1, 1}, - {&__pyx_n_s_USER_AGENT, __pyx_k_USER_AGENT, sizeof(__pyx_k_USER_AGENT), 0, 0, 1, 1}, - {&__pyx_n_s_VARY, __pyx_k_VARY, sizeof(__pyx_k_VARY), 0, 0, 1, 1}, - {&__pyx_n_s_VIA, __pyx_k_VIA, sizeof(__pyx_k_VIA), 0, 0, 1, 1}, - {&__pyx_n_s_WANT_DIGEST, __pyx_k_WANT_DIGEST, sizeof(__pyx_k_WANT_DIGEST), 0, 0, 1, 1}, - {&__pyx_n_s_WARNING, __pyx_k_WARNING, sizeof(__pyx_k_WARNING), 0, 0, 1, 1}, - {&__pyx_n_s_WWW_AUTHENTICATE, __pyx_k_WWW_AUTHENTICATE, sizeof(__pyx_k_WWW_AUTHENTICATE), 0, 0, 1, 1}, - {&__pyx_n_s_X_FORWARDED_FOR, __pyx_k_X_FORWARDED_FOR, sizeof(__pyx_k_X_FORWARDED_FOR), 0, 0, 1, 1}, - {&__pyx_n_s_X_FORWARDED_HOST, __pyx_k_X_FORWARDED_HOST, sizeof(__pyx_k_X_FORWARDED_HOST), 0, 0, 1, 1}, - {&__pyx_n_s_X_FORWARDED_PROTO, __pyx_k_X_FORWARDED_PROTO, sizeof(__pyx_k_X_FORWARDED_PROTO), 0, 0, 1, 1}, - {&__pyx_kp_u__11, __pyx_k__11, sizeof(__pyx_k__11), 0, 1, 0, 0}, - {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, - {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, - {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, - {&__pyx_kp_b__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 0, 0}, - {&__pyx_kp_u__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 1, 0, 0}, - {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, - {&__pyx_n_s_aiohttp, __pyx_k_aiohttp, sizeof(__pyx_k_aiohttp), 0, 0, 1, 1}, - {&__pyx_n_s_aiohttp__http_parser, __pyx_k_aiohttp__http_parser, sizeof(__pyx_k_aiohttp__http_parser), 0, 0, 1, 1}, - {&__pyx_kp_s_aiohttp__http_parser_pyx, __pyx_k_aiohttp__http_parser_pyx, sizeof(__pyx_k_aiohttp__http_parser_pyx), 0, 0, 1, 0}, - {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1}, - {&__pyx_n_s_args, __pyx_k_args, sizeof(__pyx_k_args), 0, 0, 1, 1}, - {&__pyx_n_s_auto_decompress, __pyx_k_auto_decompress, sizeof(__pyx_k_auto_decompress), 0, 0, 1, 1}, - {&__pyx_n_s_begin_http_chunk_receiving, __pyx_k_begin_http_chunk_receiving, sizeof(__pyx_k_begin_http_chunk_receiving), 0, 0, 1, 1}, - {&__pyx_n_u_br, __pyx_k_br, sizeof(__pyx_k_br), 0, 1, 0, 1}, - {&__pyx_n_s_buf_data, __pyx_k_buf_data, sizeof(__pyx_k_buf_data), 0, 0, 1, 1}, - {&__pyx_n_s_build, __pyx_k_build, sizeof(__pyx_k_build), 0, 0, 1, 1}, - {&__pyx_n_s_chunked, __pyx_k_chunked, sizeof(__pyx_k_chunked), 0, 0, 1, 1}, - {&__pyx_n_u_chunked, __pyx_k_chunked, sizeof(__pyx_k_chunked), 0, 1, 0, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_close, __pyx_k_close, sizeof(__pyx_k_close), 0, 0, 1, 1}, - {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, - {&__pyx_n_u_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 1, 0, 1}, - {&__pyx_n_s_compression, __pyx_k_compression, sizeof(__pyx_k_compression), 0, 0, 1, 1}, - {&__pyx_n_u_compression, __pyx_k_compression, sizeof(__pyx_k_compression), 0, 1, 0, 1}, - {&__pyx_n_u_deflate, __pyx_k_deflate, sizeof(__pyx_k_deflate), 0, 1, 0, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, 
sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_encoded, __pyx_k_encoded, sizeof(__pyx_k_encoded), 0, 0, 1, 1}, - {&__pyx_n_s_end_http_chunk_receiving, __pyx_k_end_http_chunk_receiving, sizeof(__pyx_k_end_http_chunk_receiving), 0, 0, 1, 1}, - {&__pyx_n_s_feed_data, __pyx_k_feed_data, sizeof(__pyx_k_feed_data), 0, 0, 1, 1}, - {&__pyx_n_s_feed_eof, __pyx_k_feed_eof, sizeof(__pyx_k_feed_eof), 0, 0, 1, 1}, - {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, - {&__pyx_n_s_fragment, __pyx_k_fragment, sizeof(__pyx_k_fragment), 0, 0, 1, 1}, - {&__pyx_n_s_genexpr, __pyx_k_genexpr, sizeof(__pyx_k_genexpr), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_u_gzip, __pyx_k_gzip, sizeof(__pyx_k_gzip), 0, 1, 0, 1}, - {&__pyx_n_s_hdrs, __pyx_k_hdrs, sizeof(__pyx_k_hdrs), 0, 0, 1, 1}, - {&__pyx_n_s_headers, __pyx_k_headers, sizeof(__pyx_k_headers), 0, 0, 1, 1}, - {&__pyx_n_u_headers, __pyx_k_headers, sizeof(__pyx_k_headers), 0, 1, 0, 1}, - {&__pyx_n_s_host, __pyx_k_host, sizeof(__pyx_k_host), 0, 0, 1, 1}, - {&__pyx_n_s_http_exceptions, __pyx_k_http_exceptions, sizeof(__pyx_k_http_exceptions), 0, 0, 1, 1}, - {&__pyx_n_s_http_parser, __pyx_k_http_parser, sizeof(__pyx_k_http_parser), 0, 0, 1, 1}, - {&__pyx_n_s_http_writer, __pyx_k_http_writer, sizeof(__pyx_k_http_writer), 0, 0, 1, 1}, - {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_kp_u_invalid_url_r, __pyx_k_invalid_url_r, sizeof(__pyx_k_invalid_url_r), 0, 1, 0, 0}, - {&__pyx_n_s_limit, __pyx_k_limit, sizeof(__pyx_k_limit), 0, 0, 1, 1}, - {&__pyx_n_s_loop, __pyx_k_loop, sizeof(__pyx_k_loop), 0, 0, 1, 1}, - {&__pyx_n_s_lower, __pyx_k_lower, sizeof(__pyx_k_lower), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_max_field_size, __pyx_k_max_field_size, sizeof(__pyx_k_max_field_size), 0, 0, 1, 1}, - {&__pyx_n_s_max_headers, __pyx_k_max_headers, sizeof(__pyx_k_max_headers), 0, 0, 1, 1}, - {&__pyx_n_s_max_line_size, __pyx_k_max_line_size, sizeof(__pyx_k_max_line_size), 0, 0, 1, 1}, - {&__pyx_n_s_method, __pyx_k_method, sizeof(__pyx_k_method), 0, 0, 1, 1}, - {&__pyx_n_u_method, __pyx_k_method, sizeof(__pyx_k_method), 0, 1, 0, 1}, - {&__pyx_n_s_multidict, __pyx_k_multidict, sizeof(__pyx_k_multidict), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, - {&__pyx_n_s_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 0, 1, 1}, - {&__pyx_n_s_partition, __pyx_k_partition, sizeof(__pyx_k_partition), 0, 0, 1, 1}, - {&__pyx_n_s_password, __pyx_k_password, sizeof(__pyx_k_password), 0, 0, 1, 1}, - {&__pyx_n_s_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 0, 1, 1}, - {&__pyx_n_u_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 1, 0, 1}, - {&__pyx_n_s_payload_exception, __pyx_k_payload_exception, sizeof(__pyx_k_payload_exception), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_port, __pyx_k_port, sizeof(__pyx_k_port), 0, 0, 1, 1}, - {&__pyx_n_s_protocol, __pyx_k_protocol, sizeof(__pyx_k_protocol), 0, 0, 1, 1}, - {&__pyx_n_s_py_buf, __pyx_k_py_buf, sizeof(__pyx_k_py_buf), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, 
sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_RawRequestMessage, __pyx_k_pyx_unpickle_RawRequestMessage, sizeof(__pyx_k_pyx_unpickle_RawRequestMessage), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_RawResponseMessag, __pyx_k_pyx_unpickle_RawResponseMessag, sizeof(__pyx_k_pyx_unpickle_RawResponseMessag), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_query_string, __pyx_k_query_string, sizeof(__pyx_k_query_string), 0, 0, 1, 1}, - {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_raw_headers, __pyx_k_raw_headers, sizeof(__pyx_k_raw_headers), 0, 0, 1, 1}, - {&__pyx_n_u_raw_headers, __pyx_k_raw_headers, sizeof(__pyx_k_raw_headers), 0, 1, 0, 1}, - {&__pyx_n_s_read_until_eof, __pyx_k_read_until_eof, sizeof(__pyx_k_read_until_eof), 0, 0, 1, 1}, - {&__pyx_n_s_reason, __pyx_k_reason, sizeof(__pyx_k_reason), 0, 0, 1, 1}, - {&__pyx_n_u_reason, __pyx_k_reason, sizeof(__pyx_k_reason), 0, 1, 0, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_repr___locals_genexpr, __pyx_k_repr___locals_genexpr, sizeof(__pyx_k_repr___locals_genexpr), 0, 0, 1, 1}, - {&__pyx_n_s_response_with_body, __pyx_k_response_with_body, sizeof(__pyx_k_response_with_body), 0, 0, 1, 1}, - {&__pyx_n_s_scheme, __pyx_k_scheme, sizeof(__pyx_k_scheme), 0, 0, 1, 1}, - {&__pyx_n_s_send, __pyx_k_send, sizeof(__pyx_k_send), 0, 0, 1, 1}, - {&__pyx_n_s_set_exception, __pyx_k_set_exception, sizeof(__pyx_k_set_exception), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_should_close, __pyx_k_should_close, sizeof(__pyx_k_should_close), 0, 0, 1, 1}, - {&__pyx_n_u_should_close, __pyx_k_should_close, sizeof(__pyx_k_should_close), 0, 1, 0, 1}, - {&__pyx_n_s_streams, __pyx_k_streams, sizeof(__pyx_k_streams), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_throw, __pyx_k_throw, sizeof(__pyx_k_throw), 0, 0, 1, 1}, - {&__pyx_n_s_timer, __pyx_k_timer, sizeof(__pyx_k_timer), 0, 0, 1, 1}, - {&__pyx_kp_u_unknown, __pyx_k_unknown, sizeof(__pyx_k_unknown), 0, 1, 0, 0}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_upgrade, __pyx_k_upgrade, sizeof(__pyx_k_upgrade), 0, 0, 1, 1}, - {&__pyx_n_u_upgrade, __pyx_k_upgrade, sizeof(__pyx_k_upgrade), 0, 1, 0, 1}, - {&__pyx_n_s_url, __pyx_k_url, sizeof(__pyx_k_url), 0, 0, 1, 1}, - {&__pyx_n_u_url, __pyx_k_url, sizeof(__pyx_k_url), 0, 1, 0, 1}, - {&__pyx_n_s_user, __pyx_k_user, sizeof(__pyx_k_user), 0, 0, 1, 1}, - {&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1}, - {&__pyx_n_u_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 1, 0, 1}, - {&__pyx_n_s_yarl, __pyx_k_yarl, 
sizeof(__pyx_k_yarl), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 87, __pyx_L1_error) - __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 316, __pyx_L1_error) - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(1, 2, __pyx_L1_error) - __pyx_builtin_BaseException = __Pyx_GetBuiltinName(__pyx_n_s_BaseException); if (!__pyx_builtin_BaseException) __PYX_ERR(0, 631, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__5); - __Pyx_GIVEREF(__pyx_tuple__5); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__6); - __Pyx_GIVEREF(__pyx_tuple__6); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__7); - __Pyx_GIVEREF(__pyx_tuple__7); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__8); - __Pyx_GIVEREF(__pyx_tuple__8); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - */ - __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__9); - __Pyx_GIVEREF(__pyx_tuple__9); - - /* "(tree fragment)":4 - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< - */ - __pyx_tuple__10 = PyTuple_Pack(1, 
__pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__10); - __Pyx_GIVEREF(__pyx_tuple__10); - - /* "aiohttp/_http_parser.pyx":57 - * char* PyByteArray_AsString(object) - * - * __all__ = ('HttpRequestParser', 'HttpResponseParser', # <<<<<<<<<<<<<< - * 'RawRequestMessage', 'RawResponseMessage') - * - */ - __pyx_tuple__12 = PyTuple_Pack(4, __pyx_n_u_HttpRequestParser, __pyx_n_u_HttpResponseParser, __pyx_n_u_RawRequestMessage_2, __pyx_n_u_RawResponseMessage_2); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__12); - __Pyx_GIVEREF(__pyx_tuple__12); - - /* "aiohttp/_http_parser.pyx":785 - * - * - * def parse_url(url): # <<<<<<<<<<<<<< - * cdef: - * Py_buffer py_buf - */ - __pyx_tuple__13 = PyTuple_Pack(3, __pyx_n_s_url, __pyx_n_s_py_buf, __pyx_n_s_buf_data); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 785, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); - __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(1, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_parse_url, 785, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 785, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__15 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_RawRequestMessage, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_tuple__17 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_RawResponseMessag, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - __pyx_umethod_PyUnicode_Type_partition.type = (PyObject*)&PyUnicode_Type; - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_21004882 = PyInt_FromLong(21004882L); if (unlikely(!__pyx_int_21004882)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_209127132 = PyInt_FromLong(209127132L); if (unlikely(!__pyx_int_209127132)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static 
CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __pyx_v_7aiohttp_12_http_parser_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_URL = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_URL_build = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_CIMultiDict = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_HttpVersion = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_HttpVersion10 = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_HttpVersion11 = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1 = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_StreamReader = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser_DeflateBuffer = Py_None; Py_INCREF(Py_None); - __pyx_v_7aiohttp_12_http_parser__http_method = ((PyObject*)Py_None); Py_INCREF(Py_None); - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - if (PyType_Ready(&__pyx_type_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_7aiohttp_12_http_parser_RawRequestMessage.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_12_http_parser_RawRequestMessage.tp_dictoffset && __pyx_type_7aiohttp_12_http_parser_RawRequestMessage.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_7aiohttp_12_http_parser_RawRequestMessage.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RawRequestMessage_2, (PyObject *)&__pyx_type_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_12_http_parser_RawRequestMessage) < 0) __PYX_ERR(0, 110, __pyx_L1_error) - __pyx_ptype_7aiohttp_12_http_parser_RawRequestMessage = &__pyx_type_7aiohttp_12_http_parser_RawRequestMessage; - if 
(PyType_Ready(&__pyx_type_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_7aiohttp_12_http_parser_RawResponseMessage.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_12_http_parser_RawResponseMessage.tp_dictoffset && __pyx_type_7aiohttp_12_http_parser_RawResponseMessage.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_7aiohttp_12_http_parser_RawResponseMessage.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RawResponseMessage_2, (PyObject *)&__pyx_type_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_12_http_parser_RawResponseMessage) < 0) __PYX_ERR(0, 210, __pyx_L1_error) - __pyx_ptype_7aiohttp_12_http_parser_RawResponseMessage = &__pyx_type_7aiohttp_12_http_parser_RawResponseMessage; - __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser = &__pyx_vtable_7aiohttp_12_http_parser_HttpParser; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._init = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, enum http_parser_type, PyObject *, PyObject *, int, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args))__pyx_f_7aiohttp_12_http_parser_10HttpParser__init; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._process_header = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_header_field = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, char *, size_t))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_header_value = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, char *, size_t))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_headers_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_message_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_chunk_header = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_chunk_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete; - __pyx_vtable_7aiohttp_12_http_parser_HttpParser.http_version = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser_http_version; - if (PyType_Ready(&__pyx_type_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 272, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_7aiohttp_12_http_parser_HttpParser.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && 
[Remainder of the deleted, Cython-generated third_party/python/aiohttp/_http_parser.c (removed along with the vendored aiohttp package). The deleted hunk shown here covers: PyType_Ready/vtable/__reduce__ setup for HttpParser, HttpRequestParser and HttpResponseParser plus the __repr__/genexpr scope structs; the __Pyx_modinit type/variable/function import helpers; the PyInit__http_parser entry point with PEP 489 multi-phase init support (single-interpreter check, __pyx_pymod_create, spec copying); module creation, global/constant initialization and the modinit calls inside __pyx_pymod_exec__http_parser; the module execution code generated from _http_parser.pyx, which imports CIMultiDict/CIMultiDictProxy from multidict, URL from yarl, aiohttp.hdrs, the .http_exceptions classes (BadHttpMessage, BadStatusLine, ContentLengthError, InvalidHeader, InvalidURLError, LineTooLong, PayloadEncodingError, TransferEncodingError), DeflateBuffer from .http_parser, HttpVersion/HttpVersion10/HttpVersion11 from .http_writer and EMPTY_PAYLOAD/StreamReader from .streams; and the start of the _headers.pxi header-name tuple (hdrs.ACCEPT, hdrs.ACCEPT_CHARSET, … hdrs.WWW_AUTHENTICATE), which continues below.]
# <<<<<<<<<<<<<< - * hdrs.WANT_DIGEST, - * hdrs.WARNING, - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_73 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WWW_AUTHENTICATE); if (unlikely(!__pyx_t_73)) __PYX_ERR(5, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_73); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_headers.pxi":78 - * hdrs.VIA, - * hdrs.WWW_AUTHENTICATE, - * hdrs.WANT_DIGEST, # <<<<<<<<<<<<<< - * hdrs.WARNING, - * hdrs.X_FORWARDED_FOR, - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 78, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_74 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WANT_DIGEST); if (unlikely(!__pyx_t_74)) __PYX_ERR(5, 78, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_74); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_headers.pxi":79 - * hdrs.WWW_AUTHENTICATE, - * hdrs.WANT_DIGEST, - * hdrs.WARNING, # <<<<<<<<<<<<<< - * hdrs.X_FORWARDED_FOR, - * hdrs.X_FORWARDED_HOST, - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 79, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_75 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_WARNING); if (unlikely(!__pyx_t_75)) __PYX_ERR(5, 79, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_75); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_headers.pxi":80 - * hdrs.WANT_DIGEST, - * hdrs.WARNING, - * hdrs.X_FORWARDED_FOR, # <<<<<<<<<<<<<< - * hdrs.X_FORWARDED_HOST, - * hdrs.X_FORWARDED_PROTO, - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 80, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_76 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_X_FORWARDED_FOR); if (unlikely(!__pyx_t_76)) __PYX_ERR(5, 80, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_76); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_headers.pxi":81 - * hdrs.WARNING, - * hdrs.X_FORWARDED_FOR, - * hdrs.X_FORWARDED_HOST, # <<<<<<<<<<<<<< - * hdrs.X_FORWARDED_PROTO, - * ) - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_77 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_X_FORWARDED_HOST); if (unlikely(!__pyx_t_77)) __PYX_ERR(5, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_77); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_headers.pxi":82 - * hdrs.X_FORWARDED_FOR, - * hdrs.X_FORWARDED_HOST, - * hdrs.X_FORWARDED_PROTO, # <<<<<<<<<<<<<< - * ) - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 82, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_78 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_X_FORWARDED_PROTO); if (unlikely(!__pyx_t_78)) __PYX_ERR(5, 82, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_78); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_headers.pxi":6 - * from . 
import hdrs - * cdef tuple headers = ( - * hdrs.ACCEPT, # <<<<<<<<<<<<<< - * hdrs.ACCEPT_CHARSET, - * hdrs.ACCEPT_ENCODING, - */ - __pyx_t_1 = PyTuple_New(77); if (unlikely(!__pyx_t_1)) __PYX_ERR(5, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_7); - PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_t_7); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_1, 6, __pyx_t_8); - __Pyx_GIVEREF(__pyx_t_9); - PyTuple_SET_ITEM(__pyx_t_1, 7, __pyx_t_9); - __Pyx_GIVEREF(__pyx_t_10); - PyTuple_SET_ITEM(__pyx_t_1, 8, __pyx_t_10); - __Pyx_GIVEREF(__pyx_t_11); - PyTuple_SET_ITEM(__pyx_t_1, 9, __pyx_t_11); - __Pyx_GIVEREF(__pyx_t_12); - PyTuple_SET_ITEM(__pyx_t_1, 10, __pyx_t_12); - __Pyx_GIVEREF(__pyx_t_13); - PyTuple_SET_ITEM(__pyx_t_1, 11, __pyx_t_13); - __Pyx_GIVEREF(__pyx_t_14); - PyTuple_SET_ITEM(__pyx_t_1, 12, __pyx_t_14); - __Pyx_GIVEREF(__pyx_t_15); - PyTuple_SET_ITEM(__pyx_t_1, 13, __pyx_t_15); - __Pyx_GIVEREF(__pyx_t_16); - PyTuple_SET_ITEM(__pyx_t_1, 14, __pyx_t_16); - __Pyx_GIVEREF(__pyx_t_17); - PyTuple_SET_ITEM(__pyx_t_1, 15, __pyx_t_17); - __Pyx_GIVEREF(__pyx_t_18); - PyTuple_SET_ITEM(__pyx_t_1, 16, __pyx_t_18); - __Pyx_GIVEREF(__pyx_t_19); - PyTuple_SET_ITEM(__pyx_t_1, 17, __pyx_t_19); - __Pyx_GIVEREF(__pyx_t_20); - PyTuple_SET_ITEM(__pyx_t_1, 18, __pyx_t_20); - __Pyx_GIVEREF(__pyx_t_21); - PyTuple_SET_ITEM(__pyx_t_1, 19, __pyx_t_21); - __Pyx_GIVEREF(__pyx_t_22); - PyTuple_SET_ITEM(__pyx_t_1, 20, __pyx_t_22); - __Pyx_GIVEREF(__pyx_t_23); - PyTuple_SET_ITEM(__pyx_t_1, 21, __pyx_t_23); - __Pyx_GIVEREF(__pyx_t_24); - PyTuple_SET_ITEM(__pyx_t_1, 22, __pyx_t_24); - __Pyx_GIVEREF(__pyx_t_25); - PyTuple_SET_ITEM(__pyx_t_1, 23, __pyx_t_25); - __Pyx_GIVEREF(__pyx_t_26); - PyTuple_SET_ITEM(__pyx_t_1, 24, __pyx_t_26); - __Pyx_GIVEREF(__pyx_t_27); - PyTuple_SET_ITEM(__pyx_t_1, 25, __pyx_t_27); - __Pyx_GIVEREF(__pyx_t_28); - PyTuple_SET_ITEM(__pyx_t_1, 26, __pyx_t_28); - __Pyx_GIVEREF(__pyx_t_29); - PyTuple_SET_ITEM(__pyx_t_1, 27, __pyx_t_29); - __Pyx_GIVEREF(__pyx_t_30); - PyTuple_SET_ITEM(__pyx_t_1, 28, __pyx_t_30); - __Pyx_GIVEREF(__pyx_t_31); - PyTuple_SET_ITEM(__pyx_t_1, 29, __pyx_t_31); - __Pyx_GIVEREF(__pyx_t_32); - PyTuple_SET_ITEM(__pyx_t_1, 30, __pyx_t_32); - __Pyx_GIVEREF(__pyx_t_33); - PyTuple_SET_ITEM(__pyx_t_1, 31, __pyx_t_33); - __Pyx_GIVEREF(__pyx_t_34); - PyTuple_SET_ITEM(__pyx_t_1, 32, __pyx_t_34); - __Pyx_GIVEREF(__pyx_t_35); - PyTuple_SET_ITEM(__pyx_t_1, 33, __pyx_t_35); - __Pyx_GIVEREF(__pyx_t_36); - PyTuple_SET_ITEM(__pyx_t_1, 34, __pyx_t_36); - __Pyx_GIVEREF(__pyx_t_37); - PyTuple_SET_ITEM(__pyx_t_1, 35, __pyx_t_37); - __Pyx_GIVEREF(__pyx_t_38); - PyTuple_SET_ITEM(__pyx_t_1, 36, __pyx_t_38); - __Pyx_GIVEREF(__pyx_t_39); - PyTuple_SET_ITEM(__pyx_t_1, 37, __pyx_t_39); - __Pyx_GIVEREF(__pyx_t_40); - PyTuple_SET_ITEM(__pyx_t_1, 38, __pyx_t_40); - __Pyx_GIVEREF(__pyx_t_41); - PyTuple_SET_ITEM(__pyx_t_1, 39, __pyx_t_41); - __Pyx_GIVEREF(__pyx_t_42); - PyTuple_SET_ITEM(__pyx_t_1, 40, __pyx_t_42); - __Pyx_GIVEREF(__pyx_t_43); - PyTuple_SET_ITEM(__pyx_t_1, 41, __pyx_t_43); - __Pyx_GIVEREF(__pyx_t_44); - PyTuple_SET_ITEM(__pyx_t_1, 42, __pyx_t_44); - __Pyx_GIVEREF(__pyx_t_45); - PyTuple_SET_ITEM(__pyx_t_1, 43, 
__pyx_t_45); - __Pyx_GIVEREF(__pyx_t_46); - PyTuple_SET_ITEM(__pyx_t_1, 44, __pyx_t_46); - __Pyx_GIVEREF(__pyx_t_47); - PyTuple_SET_ITEM(__pyx_t_1, 45, __pyx_t_47); - __Pyx_GIVEREF(__pyx_t_48); - PyTuple_SET_ITEM(__pyx_t_1, 46, __pyx_t_48); - __Pyx_GIVEREF(__pyx_t_49); - PyTuple_SET_ITEM(__pyx_t_1, 47, __pyx_t_49); - __Pyx_GIVEREF(__pyx_t_50); - PyTuple_SET_ITEM(__pyx_t_1, 48, __pyx_t_50); - __Pyx_GIVEREF(__pyx_t_51); - PyTuple_SET_ITEM(__pyx_t_1, 49, __pyx_t_51); - __Pyx_GIVEREF(__pyx_t_52); - PyTuple_SET_ITEM(__pyx_t_1, 50, __pyx_t_52); - __Pyx_GIVEREF(__pyx_t_53); - PyTuple_SET_ITEM(__pyx_t_1, 51, __pyx_t_53); - __Pyx_GIVEREF(__pyx_t_54); - PyTuple_SET_ITEM(__pyx_t_1, 52, __pyx_t_54); - __Pyx_GIVEREF(__pyx_t_55); - PyTuple_SET_ITEM(__pyx_t_1, 53, __pyx_t_55); - __Pyx_GIVEREF(__pyx_t_56); - PyTuple_SET_ITEM(__pyx_t_1, 54, __pyx_t_56); - __Pyx_GIVEREF(__pyx_t_57); - PyTuple_SET_ITEM(__pyx_t_1, 55, __pyx_t_57); - __Pyx_GIVEREF(__pyx_t_58); - PyTuple_SET_ITEM(__pyx_t_1, 56, __pyx_t_58); - __Pyx_GIVEREF(__pyx_t_59); - PyTuple_SET_ITEM(__pyx_t_1, 57, __pyx_t_59); - __Pyx_GIVEREF(__pyx_t_60); - PyTuple_SET_ITEM(__pyx_t_1, 58, __pyx_t_60); - __Pyx_GIVEREF(__pyx_t_61); - PyTuple_SET_ITEM(__pyx_t_1, 59, __pyx_t_61); - __Pyx_GIVEREF(__pyx_t_62); - PyTuple_SET_ITEM(__pyx_t_1, 60, __pyx_t_62); - __Pyx_GIVEREF(__pyx_t_63); - PyTuple_SET_ITEM(__pyx_t_1, 61, __pyx_t_63); - __Pyx_GIVEREF(__pyx_t_64); - PyTuple_SET_ITEM(__pyx_t_1, 62, __pyx_t_64); - __Pyx_GIVEREF(__pyx_t_65); - PyTuple_SET_ITEM(__pyx_t_1, 63, __pyx_t_65); - __Pyx_GIVEREF(__pyx_t_66); - PyTuple_SET_ITEM(__pyx_t_1, 64, __pyx_t_66); - __Pyx_GIVEREF(__pyx_t_67); - PyTuple_SET_ITEM(__pyx_t_1, 65, __pyx_t_67); - __Pyx_GIVEREF(__pyx_t_68); - PyTuple_SET_ITEM(__pyx_t_1, 66, __pyx_t_68); - __Pyx_GIVEREF(__pyx_t_69); - PyTuple_SET_ITEM(__pyx_t_1, 67, __pyx_t_69); - __Pyx_GIVEREF(__pyx_t_70); - PyTuple_SET_ITEM(__pyx_t_1, 68, __pyx_t_70); - __Pyx_GIVEREF(__pyx_t_71); - PyTuple_SET_ITEM(__pyx_t_1, 69, __pyx_t_71); - __Pyx_GIVEREF(__pyx_t_72); - PyTuple_SET_ITEM(__pyx_t_1, 70, __pyx_t_72); - __Pyx_GIVEREF(__pyx_t_73); - PyTuple_SET_ITEM(__pyx_t_1, 71, __pyx_t_73); - __Pyx_GIVEREF(__pyx_t_74); - PyTuple_SET_ITEM(__pyx_t_1, 72, __pyx_t_74); - __Pyx_GIVEREF(__pyx_t_75); - PyTuple_SET_ITEM(__pyx_t_1, 73, __pyx_t_75); - __Pyx_GIVEREF(__pyx_t_76); - PyTuple_SET_ITEM(__pyx_t_1, 74, __pyx_t_76); - __Pyx_GIVEREF(__pyx_t_77); - PyTuple_SET_ITEM(__pyx_t_1, 75, __pyx_t_77); - __Pyx_GIVEREF(__pyx_t_78); - PyTuple_SET_ITEM(__pyx_t_1, 76, __pyx_t_78); - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_5 = 0; - __pyx_t_6 = 0; - __pyx_t_7 = 0; - __pyx_t_8 = 0; - __pyx_t_9 = 0; - __pyx_t_10 = 0; - __pyx_t_11 = 0; - __pyx_t_12 = 0; - __pyx_t_13 = 0; - __pyx_t_14 = 0; - __pyx_t_15 = 0; - __pyx_t_16 = 0; - __pyx_t_17 = 0; - __pyx_t_18 = 0; - __pyx_t_19 = 0; - __pyx_t_20 = 0; - __pyx_t_21 = 0; - __pyx_t_22 = 0; - __pyx_t_23 = 0; - __pyx_t_24 = 0; - __pyx_t_25 = 0; - __pyx_t_26 = 0; - __pyx_t_27 = 0; - __pyx_t_28 = 0; - __pyx_t_29 = 0; - __pyx_t_30 = 0; - __pyx_t_31 = 0; - __pyx_t_32 = 0; - __pyx_t_33 = 0; - __pyx_t_34 = 0; - __pyx_t_35 = 0; - __pyx_t_36 = 0; - __pyx_t_37 = 0; - __pyx_t_38 = 0; - __pyx_t_39 = 0; - __pyx_t_40 = 0; - __pyx_t_41 = 0; - __pyx_t_42 = 0; - __pyx_t_43 = 0; - __pyx_t_44 = 0; - __pyx_t_45 = 0; - __pyx_t_46 = 0; - __pyx_t_47 = 0; - __pyx_t_48 = 0; - __pyx_t_49 = 0; - __pyx_t_50 = 0; - __pyx_t_51 = 0; - __pyx_t_52 = 0; - __pyx_t_53 = 0; - __pyx_t_54 = 0; - __pyx_t_55 = 0; - __pyx_t_56 = 0; - __pyx_t_57 = 0; - __pyx_t_58 = 0; - __pyx_t_59 = 
0; - __pyx_t_60 = 0; - __pyx_t_61 = 0; - __pyx_t_62 = 0; - __pyx_t_63 = 0; - __pyx_t_64 = 0; - __pyx_t_65 = 0; - __pyx_t_66 = 0; - __pyx_t_67 = 0; - __pyx_t_68 = 0; - __pyx_t_69 = 0; - __pyx_t_70 = 0; - __pyx_t_71 = 0; - __pyx_t_72 = 0; - __pyx_t_73 = 0; - __pyx_t_74 = 0; - __pyx_t_75 = 0; - __pyx_t_76 = 0; - __pyx_t_77 = 0; - __pyx_t_78 = 0; - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_headers); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_headers, ((PyObject*)__pyx_t_1)); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":57 - * char* PyByteArray_AsString(object) - * - * __all__ = ('HttpRequestParser', 'HttpResponseParser', # <<<<<<<<<<<<<< - * 'RawRequestMessage', 'RawResponseMessage') - * - */ - if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__12) < 0) __PYX_ERR(0, 57, __pyx_L1_error) - - /* "aiohttp/_http_parser.pyx":60 - * 'RawRequestMessage', 'RawResponseMessage') - * - * cdef object URL = _URL # <<<<<<<<<<<<<< - * cdef object URL_build = URL.build - * cdef object CIMultiDict = _CIMultiDict - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_URL_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_URL); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":61 - * - * cdef object URL = _URL - * cdef object URL_build = URL.build # <<<<<<<<<<<<<< - * cdef object CIMultiDict = _CIMultiDict - * cdef object CIMultiDictProxy = _CIMultiDictProxy - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_7aiohttp_12_http_parser_URL, __pyx_n_s_build); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 61, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_URL_build); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_URL_build, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":62 - * cdef object URL = _URL - * cdef object URL_build = URL.build - * cdef object CIMultiDict = _CIMultiDict # <<<<<<<<<<<<<< - * cdef object CIMultiDictProxy = _CIMultiDictProxy - * cdef object HttpVersion = _HttpVersion - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_CIMultiDict_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDict); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDict, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":63 - * cdef object URL_build = URL.build - * cdef object CIMultiDict = _CIMultiDict - * cdef object CIMultiDictProxy = _CIMultiDictProxy # <<<<<<<<<<<<<< - * cdef object HttpVersion = _HttpVersion - * cdef object HttpVersion10 = _HttpVersion10 - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_CIMultiDictProxy_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CIMultiDictProxy, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":64 - * cdef object CIMultiDict = _CIMultiDict - * cdef object CIMultiDictProxy = _CIMultiDictProxy - * cdef object HttpVersion = _HttpVersion # <<<<<<<<<<<<<< - * cdef object HttpVersion10 = _HttpVersion10 - * cdef object HttpVersion11 = _HttpVersion11 - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpVersion_2); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 64, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":65 - * cdef object CIMultiDictProxy = _CIMultiDictProxy - * cdef object HttpVersion = _HttpVersion - * cdef object HttpVersion10 = _HttpVersion10 # <<<<<<<<<<<<<< - * cdef object HttpVersion11 = _HttpVersion11 - * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpVersion10_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 65, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion10); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion10, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":66 - * cdef object HttpVersion = _HttpVersion - * cdef object HttpVersion10 = _HttpVersion10 - * cdef object HttpVersion11 = _HttpVersion11 # <<<<<<<<<<<<<< - * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 - * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpVersion11_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_HttpVersion11); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_HttpVersion11, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":67 - * cdef object HttpVersion10 = _HttpVersion10 - * cdef object HttpVersion11 = _HttpVersion11 - * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 # <<<<<<<<<<<<<< - * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING - * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_78 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_78)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_78); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_SEC_WEBSOCKET_KEY1, __pyx_t_78); - __Pyx_GIVEREF(__pyx_t_78); - __pyx_t_78 = 0; - - /* "aiohttp/_http_parser.pyx":68 - * cdef object HttpVersion11 = _HttpVersion11 - * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 - * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING # <<<<<<<<<<<<<< - * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD - * cdef object StreamReader = _StreamReader - */ - __Pyx_GetModuleGlobalName(__pyx_t_78, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_78)) __PYX_ERR(0, 68, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_78); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_78, __pyx_n_s_CONTENT_ENCODING); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_78); __pyx_t_78 = 0; - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_CONTENT_ENCODING, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":69 - * cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 - * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING - * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD # <<<<<<<<<<<<<< - * cdef object 
StreamReader = _StreamReader - * cdef object DeflateBuffer = _DeflateBuffer - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_EMPTY_PAYLOAD_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_EMPTY_PAYLOAD, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":70 - * cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING - * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD - * cdef object StreamReader = _StreamReader # <<<<<<<<<<<<<< - * cdef object DeflateBuffer = _DeflateBuffer - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamReader_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_StreamReader); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_StreamReader, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":71 - * cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD - * cdef object StreamReader = _StreamReader - * cdef object DeflateBuffer = _DeflateBuffer # <<<<<<<<<<<<<< - * - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DeflateBuffer_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser_DeflateBuffer, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":85 - * DEF METHODS_COUNT = 34; - * - * cdef list _http_method = [] # <<<<<<<<<<<<<< - * - * for i in range(METHODS_COUNT): - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 85, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_parser__http_method); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_parser__http_method, ((PyObject*)__pyx_t_1)); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":87 - * cdef list _http_method = [] - * - * for i in range(METHODS_COUNT): # <<<<<<<<<<<<<< - * _http_method.append( - * cparser.http_method_str( i).decode('ascii')) - */ - for (__pyx_t_79 = 0; __pyx_t_79 < 34; __pyx_t_79+=1) { - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_t_79); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 87, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_i, __pyx_t_1) < 0) __PYX_ERR(0, 87, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":88 - * - * for i in range(METHODS_COUNT): - * _http_method.append( # <<<<<<<<<<<<<< - * cparser.http_method_str( i).decode('ascii')) - * - */ - if (unlikely(__pyx_v_7aiohttp_12_http_parser__http_method == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 88, __pyx_L1_error) - } - - /* "aiohttp/_http_parser.pyx":89 - * for i in range(METHODS_COUNT): - * _http_method.append( - * cparser.http_method_str( i).decode('ascii')) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_i); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_80 = ((enum http_method)__Pyx_PyInt_As_enum__http_method(__pyx_t_1)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_81 = http_method_str(((enum http_method)__pyx_t_80)); - __pyx_t_1 = 
__Pyx_decode_c_string(__pyx_t_81, 0, strlen(__pyx_t_81), NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - - /* "aiohttp/_http_parser.pyx":88 - * - * for i in range(METHODS_COUNT): - * _http_method.append( # <<<<<<<<<<<<<< - * cparser.http_method_str( i).decode('ascii')) - * - */ - __pyx_t_82 = __Pyx_PyList_Append(__pyx_v_7aiohttp_12_http_parser__http_method, __pyx_t_1); if (unlikely(__pyx_t_82 == ((int)-1))) __PYX_ERR(0, 88, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - } - - /* "aiohttp/_http_parser.pyx":785 - * - * - * def parse_url(url): # <<<<<<<<<<<<<< - * cdef: - * Py_buffer py_buf - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_1parse_url, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 785, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_parse_url, __pyx_t_1) < 0) __PYX_ERR(0, 785, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_RawRequestMessage(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_3__pyx_unpickle_RawRequestMessage, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawRequestMessage, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":11 - * __pyx_unpickle_RawRequestMessage__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_RawRequestMessage__set_state(RawRequestMessage __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.chunked = __pyx_state[0]; __pyx_result.compression = __pyx_state[1]; __pyx_result.headers = __pyx_state[2]; __pyx_result.method = __pyx_state[3]; __pyx_result.path = __pyx_state[4]; __pyx_result.raw_headers = __pyx_state[5]; __pyx_result.should_close = __pyx_state[6]; __pyx_result.upgrade = __pyx_state[7]; __pyx_result.url = __pyx_state[8]; __pyx_result.version = __pyx_state[9] - * if len(__pyx_state) > 10 and hasattr(__pyx_result, '__dict__'): - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_5__pyx_unpickle_RawResponseMessage, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_RawResponseMessag, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_parser.pyx":1 - * #cython: language_level=3 # <<<<<<<<<<<<<< - * # - * # Based on https://github.com/MagicStack/httptools - */ - __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_11); - 
__Pyx_XDECREF(__pyx_t_12); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_XDECREF(__pyx_t_15); - __Pyx_XDECREF(__pyx_t_16); - __Pyx_XDECREF(__pyx_t_17); - __Pyx_XDECREF(__pyx_t_18); - __Pyx_XDECREF(__pyx_t_19); - __Pyx_XDECREF(__pyx_t_20); - __Pyx_XDECREF(__pyx_t_21); - __Pyx_XDECREF(__pyx_t_22); - __Pyx_XDECREF(__pyx_t_23); - __Pyx_XDECREF(__pyx_t_24); - __Pyx_XDECREF(__pyx_t_25); - __Pyx_XDECREF(__pyx_t_26); - __Pyx_XDECREF(__pyx_t_27); - __Pyx_XDECREF(__pyx_t_28); - __Pyx_XDECREF(__pyx_t_29); - __Pyx_XDECREF(__pyx_t_30); - __Pyx_XDECREF(__pyx_t_31); - __Pyx_XDECREF(__pyx_t_32); - __Pyx_XDECREF(__pyx_t_33); - __Pyx_XDECREF(__pyx_t_34); - __Pyx_XDECREF(__pyx_t_35); - __Pyx_XDECREF(__pyx_t_36); - __Pyx_XDECREF(__pyx_t_37); - __Pyx_XDECREF(__pyx_t_38); - __Pyx_XDECREF(__pyx_t_39); - __Pyx_XDECREF(__pyx_t_40); - __Pyx_XDECREF(__pyx_t_41); - __Pyx_XDECREF(__pyx_t_42); - __Pyx_XDECREF(__pyx_t_43); - __Pyx_XDECREF(__pyx_t_44); - __Pyx_XDECREF(__pyx_t_45); - __Pyx_XDECREF(__pyx_t_46); - __Pyx_XDECREF(__pyx_t_47); - __Pyx_XDECREF(__pyx_t_48); - __Pyx_XDECREF(__pyx_t_49); - __Pyx_XDECREF(__pyx_t_50); - __Pyx_XDECREF(__pyx_t_51); - __Pyx_XDECREF(__pyx_t_52); - __Pyx_XDECREF(__pyx_t_53); - __Pyx_XDECREF(__pyx_t_54); - __Pyx_XDECREF(__pyx_t_55); - __Pyx_XDECREF(__pyx_t_56); - __Pyx_XDECREF(__pyx_t_57); - __Pyx_XDECREF(__pyx_t_58); - __Pyx_XDECREF(__pyx_t_59); - __Pyx_XDECREF(__pyx_t_60); - __Pyx_XDECREF(__pyx_t_61); - __Pyx_XDECREF(__pyx_t_62); - __Pyx_XDECREF(__pyx_t_63); - __Pyx_XDECREF(__pyx_t_64); - __Pyx_XDECREF(__pyx_t_65); - __Pyx_XDECREF(__pyx_t_66); - __Pyx_XDECREF(__pyx_t_67); - __Pyx_XDECREF(__pyx_t_68); - __Pyx_XDECREF(__pyx_t_69); - __Pyx_XDECREF(__pyx_t_70); - __Pyx_XDECREF(__pyx_t_71); - __Pyx_XDECREF(__pyx_t_72); - __Pyx_XDECREF(__pyx_t_73); - __Pyx_XDECREF(__pyx_t_74); - __Pyx_XDECREF(__pyx_t_75); - __Pyx_XDECREF(__pyx_t_76); - __Pyx_XDECREF(__pyx_t_77); - __Pyx_XDECREF(__pyx_t_78); - if (__pyx_m) { - if (__pyx_d) { - __Pyx_AddTraceback("init aiohttp._http_parser", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - Py_CLEAR(__pyx_m); - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init aiohttp._http_parser"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 
0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (!j) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } else { - PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; - if (likely(m && m->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { - Py_ssize_t l = m->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return m->sq_item(o, i); - } - } -#else - if (is_list || PySequence_Check(o)) { - return PySequence_GetItem(o, i); - } -#endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -/* decode_c_bytes */ -static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( - const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, - const char* encoding, const char* errors, - PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { - if (unlikely((start < 0) | (stop < 0))) { - if (start < 0) { - start += length; - if (start < 0) - start = 0; - } - if (stop < 0) - stop += length; - } - if (stop > length) - stop = length; - if (unlikely(stop <= start)) - return __Pyx_NewRef(__pyx_empty_unicode); - length = stop - start; - cstring += start; - if (decode_func) { - return decode_func(cstring, length, errors); - } else { - return PyUnicode_Decode(cstring, length, encoding, errors); - } -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? 
"" : "s", num_found); -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* None */ -static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname) { - PyErr_Format(PyExc_NameError, "free variable '%s' referenced before assignment in enclosing scope", varname); -} - -/* RaiseTooManyValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { - PyErr_Format(PyExc_ValueError, - "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); -} - -/* RaiseNeedMoreValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { - PyErr_Format(PyExc_ValueError, - "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", - index, (index == 1) ? "" : "s"); -} - -/* IterFinish */ -static CYTHON_INLINE int __Pyx_IterFinish(void) { -#if CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* exc_type = tstate->curexc_type; - if (unlikely(exc_type)) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { - PyObject *exc_value, *exc_tb; - exc_value = tstate->curexc_value; - exc_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; - Py_DECREF(exc_type); - Py_XDECREF(exc_value); - Py_XDECREF(exc_tb); - return 0; - } else { - return -1; - } - } - return 0; -#else - if (unlikely(PyErr_Occurred())) { - if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { - PyErr_Clear(); - return 0; - } else { - return -1; - } - } - return 0; -#endif -} - -/* UnpackItemEndCheck */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { - if (unlikely(retval)) { - Py_DECREF(retval); - __Pyx_RaiseTooManyValuesError(expected); - return -1; - } else { - return __Pyx_IterFinish(); - } - return 0; -} - -/* KeywordStringCheck */ -static int __Pyx_CheckKeywordStrings( - PyObject *kwdict, - const char* function_name, - int kw_allowed) -{ - PyObject* key = 0; - Py_ssize_t pos = 0; -#if CYTHON_COMPILING_IN_PYPY - if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) - goto invalid_keyword; - return 1; -#else - while (PyDict_Next(kwdict, &pos, &key, 0)) { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_Check(key))) - #endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } - if ((!kw_allowed) && unlikely(key)) - goto invalid_keyword; - return 1; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - return 0; -#endif -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", 
- function_name, key); - #endif - return 0; -} - -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", - Py_TYPE(obj)->tp_name, type->tp_name); - return 0; -} - -/* DictGetItem */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { - PyObject *value; - value = PyDict_GetItemWithError(d, key); - if (unlikely(!value)) { - if (!PyErr_Occurred()) { - if (unlikely(PyTuple_Check(key))) { - PyObject* args = PyTuple_Pack(1, key); - if (likely(args)) { - PyErr_SetObject(PyExc_KeyError, args); - Py_DECREF(args); - } - } else { - PyErr_SetObject(PyExc_KeyError, key); - } - } - return NULL; - } - Py_INCREF(value); - return value; -} -#endif - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i<n; i++) { - if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; - } -#endif - for (i=0; i<n; i++) { - if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; - } - return 0; -} -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { - PyObject *exc_type = tstate->curexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; - if (unlikely(PyTuple_Check(err))) - return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* GetAttr */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { -#if CYTHON_USE_TYPE_SLOTS -#if PY_MAJOR_VERSION >= 3 - if (likely(PyUnicode_Check(n))) -#else - if (likely(PyString_Check(n))) -#endif - return __Pyx_PyObject_GetAttrStr(o, n); -#endif - return PyObject_GetAttr(o, n); -} - -/* GetAttr3 */ -static PyObject *__Pyx_GetAttr3Default(PyObject *d) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - return NULL; - __Pyx_PyErr_Clear(); - Py_INCREF(d); - return d; -} -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { - PyObject *r = __Pyx_GetAttr(o, n); - return (likely(r)) ? r : __Pyx_GetAttr3Default(d); -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ?
(PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? 
PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallNoArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, NULL, 0); - } -#endif -#ifdef __Pyx_CyFunction_USED - if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) -#else - if (likely(PyCFunction_Check(func))) -#endif - { - if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { - return __Pyx_PyObject_CallMethO(func, NULL); - } - } - return __Pyx_PyObject_Call(func, 
__pyx_empty_tuple, NULL); -} -#endif - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = PyCFunction_GET_SELF(func); - int flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, &arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -#endif - -/* PyObjectCall2Args */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args, *result = NULL; - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyFunction_FastCall(function, args, 2); - } - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyCFunction_FastCall(function, args, 2); - } - #endif - args = PyTuple_New(2); - if (unlikely(!args)) goto done; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, arg2); - Py_INCREF(function); - result = __Pyx_PyObject_Call(function, args, NULL); - Py_DECREF(args); - Py_DECREF(function); -done: - return result; -} - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, - CYTHON_UNUSED PyObject *cause) { - __Pyx_PyThreadState_declare - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 
must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { -#if CYTHON_COMPILING_IN_PYPY - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#else - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* BytesEquals */ -static CYTHON_INLINE int 
__Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY - return PyObject_RichCompareBool(s1, s2, equals); -#else - if (s1 == s2) { - return (equals == Py_EQ); - } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { - const char *ps1, *ps2; - Py_ssize_t length = PyBytes_GET_SIZE(s1); - if (length != PyBytes_GET_SIZE(s2)) - return (equals == Py_NE); - ps1 = PyBytes_AS_STRING(s1); - ps2 = PyBytes_AS_STRING(s2); - if (ps1[0] != ps2[0]) { - return (equals == Py_NE); - } else if (length == 1) { - return (equals == Py_EQ); - } else { - int result; -#if CYTHON_USE_UNICODE_INTERNALS - Py_hash_t hash1, hash2; - hash1 = ((PyBytesObject*)s1)->ob_shash; - hash2 = ((PyBytesObject*)s2)->ob_shash; - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - return (equals == Py_NE); - } -#endif - result = memcmp(ps1, ps2, (size_t)length); - return (equals == Py_EQ) ? (result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { - return (equals == Py_NE); - } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { - return (equals == Py_NE); - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -#endif -} - -/* UnicodeEquals */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY - return PyObject_RichCompareBool(s1, s2, equals); -#else -#if PY_MAJOR_VERSION < 3 - PyObject* owned_ref = NULL; -#endif - int s1_is_unicode, s2_is_unicode; - if (s1 == s2) { - goto return_eq; - } - s1_is_unicode = PyUnicode_CheckExact(s1); - s2_is_unicode = PyUnicode_CheckExact(s2); -#if PY_MAJOR_VERSION < 3 - if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { - owned_ref = PyUnicode_FromObject(s2); - if (unlikely(!owned_ref)) - return -1; - s2 = owned_ref; - s2_is_unicode = 1; - } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { - owned_ref = PyUnicode_FromObject(s1); - if (unlikely(!owned_ref)) - return -1; - s1 = owned_ref; - s1_is_unicode = 1; - } else if (((!s2_is_unicode) & (!s1_is_unicode))) { - return __Pyx_PyBytes_Equals(s1, s2, equals); - } -#endif - if (s1_is_unicode & s2_is_unicode) { - Py_ssize_t length; - int kind; - void *data1, *data2; - if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) - return -1; - length = __Pyx_PyUnicode_GET_LENGTH(s1); - if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { - goto return_ne; - } -#if CYTHON_USE_UNICODE_INTERNALS - { - Py_hash_t hash1, hash2; - #if CYTHON_PEP393_ENABLED - hash1 = ((PyASCIIObject*)s1)->hash; - hash2 = ((PyASCIIObject*)s2)->hash; - #else - hash1 = ((PyUnicodeObject*)s1)->hash; - hash2 = ((PyUnicodeObject*)s2)->hash; - #endif - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - goto return_ne; - } - } -#endif - kind = __Pyx_PyUnicode_KIND(s1); - if (kind != __Pyx_PyUnicode_KIND(s2)) { - goto return_ne; - } - data1 = __Pyx_PyUnicode_DATA(s1); - data2 = __Pyx_PyUnicode_DATA(s2); - if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { - goto return_ne; - } else if (length == 1) { - goto return_eq; - } else { - int result = memcmp(data1, data2, (size_t)(length * kind)); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ) ? 
(result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & s2_is_unicode) { - goto return_ne; - } else if ((s2 == Py_None) & s1_is_unicode) { - goto return_ne; - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -return_eq: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ); -return_ne: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_NE); -#endif -} - -/* SliceObject */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, - Py_ssize_t cstart, Py_ssize_t cstop, - PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, - int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { -#if CYTHON_USE_TYPE_SLOTS - PyMappingMethods* mp; -#if PY_MAJOR_VERSION < 3 - PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; - if (likely(ms && ms->sq_slice)) { - if (!has_cstart) { - if (_py_start && (*_py_start != Py_None)) { - cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); - if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; - } else - cstart = 0; - } - if (!has_cstop) { - if (_py_stop && (*_py_stop != Py_None)) { - cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); - if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; - } else - cstop = PY_SSIZE_T_MAX; - } - if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { - Py_ssize_t l = ms->sq_length(obj); - if (likely(l >= 0)) { - if (cstop < 0) { - cstop += l; - if (cstop < 0) cstop = 0; - } - if (cstart < 0) { - cstart += l; - if (cstart < 0) cstart = 0; - } - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - goto bad; - PyErr_Clear(); - } - } - return ms->sq_slice(obj, cstart, cstop); - } -#endif - mp = Py_TYPE(obj)->tp_as_mapping; - if (likely(mp && mp->mp_subscript)) -#endif - { - PyObject* result; - PyObject *py_slice, *py_start, *py_stop; - if (_py_slice) { - py_slice = *_py_slice; - } else { - PyObject* owned_start = NULL; - PyObject* owned_stop = NULL; - if (_py_start) { - py_start = *_py_start; - } else { - if (has_cstart) { - owned_start = py_start = PyInt_FromSsize_t(cstart); - if (unlikely(!py_start)) goto bad; - } else - py_start = Py_None; - } - if (_py_stop) { - py_stop = *_py_stop; - } else { - if (has_cstop) { - owned_stop = py_stop = PyInt_FromSsize_t(cstop); - if (unlikely(!py_stop)) { - Py_XDECREF(owned_start); - goto bad; - } - } else - py_stop = Py_None; - } - py_slice = PySlice_New(py_start, py_stop, Py_None); - Py_XDECREF(owned_start); - Py_XDECREF(owned_stop); - if (unlikely(!py_slice)) goto bad; - } -#if CYTHON_USE_TYPE_SLOTS - result = mp->mp_subscript(obj, py_slice); -#else - result = PyObject_GetItem(obj, py_slice); -#endif - if (!_py_slice) { - Py_DECREF(py_slice); - } - return result; - } - PyErr_Format(PyExc_TypeError, - "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); -bad: - return NULL; -} - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type, *local_value, *local_tb; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb 
= tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - *type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - -/* SwapException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = *type; - exc_info->exc_value = *value; - exc_info->exc_traceback = *tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = *type; - tstate->exc_value = *value; - tstate->exc_traceback = *tb; - #endif - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); - PyErr_SetExcInfo(*type, *value, *tb); - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#endif - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - #endif - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); -} -static CYTHON_INLINE void 
__Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -#endif - -/* decode_c_string */ -static CYTHON_INLINE PyObject* __Pyx_decode_c_string( - const char* cstring, Py_ssize_t start, Py_ssize_t stop, - const char* encoding, const char* errors, - PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { - Py_ssize_t length; - if (unlikely((start < 0) | (stop < 0))) { - size_t slen = strlen(cstring); - if (unlikely(slen > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, - "c-string too long to convert to Python"); - return NULL; - } - length = (Py_ssize_t) slen; - if (start < 0) { - start += length; - if (start < 0) - start = 0; - } - if (stop < 0) - stop += length; - } - if (unlikely(stop <= start)) - return __Pyx_NewRef(__pyx_empty_unicode); - length = stop - start; - cstring += start; - if (decode_func) { - return decode_func(cstring, length, errors); - } else { - return PyUnicode_Decode(cstring, length, encoding, errors); - } -} - -/* UnpackUnboundCMethod */ -static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { - PyObject *method; - method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); - if (unlikely(!method)) - return -1; - target->method = method; -#if CYTHON_COMPILING_IN_CPYTHON - #if PY_MAJOR_VERSION >= 3 - if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) - #endif - { - PyMethodDescrObject *descr = (PyMethodDescrObject*) method; - target->func = descr->d_method->ml_meth; - target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); - } -#endif - return 0; -} - -/* CallUnboundCMethod1 */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { - if (likely(cfunc->func)) { - int flag = cfunc->flag; - if (flag == METH_O) { - return (*(cfunc->func))(self, arg); - } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) { - if (PY_VERSION_HEX >= 0x030700A0) { - return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); - } else { - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - } - } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) { - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - } - } - return __Pyx__CallUnboundCMethod1(cfunc, self, arg); -} -#endif -static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ - PyObject *args, *result = NULL; - if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; -#if CYTHON_COMPILING_IN_CPYTHON - if (cfunc->func && (cfunc->flag & METH_VARARGS)) { - args = PyTuple_New(1); - if (unlikely(!args)) goto bad; - Py_INCREF(arg); - 
PyTuple_SET_ITEM(args, 0, arg); - if (cfunc->flag & METH_KEYWORDS) - result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); - else - result = (*cfunc->func)(self, args); - } else { - args = PyTuple_New(2); - if (unlikely(!args)) goto bad; - Py_INCREF(self); - PyTuple_SET_ITEM(args, 0, self); - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 1, arg); - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); - } -#else - args = PyTuple_Pack(2, self, arg); - if (unlikely(!args)) goto bad; - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); -#endif -bad: - Py_XDECREF(args); - return result; -} - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (!py_import) - goto bad; - #endif - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, 1); - if (!module) { - if (!PyErr_ExceptionMatches(PyExc_ImportError)) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (!py_level) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, level); - #endif - } - } -bad: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - Py_XDECREF(empty_list); - Py_XDECREF(empty_dict); - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* HasAttr */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { - PyObject *r; - if (unlikely(!__Pyx_PyBaseString_Check(n))) { - PyErr_SetString(PyExc_TypeError, - "hasattr(): attribute name must be string"); - return -1; - } - r = __Pyx_GetAttr(o, n); - if (unlikely(!r)) { - PyErr_Clear(); - return 0; - } else { - Py_DECREF(r); - return 1; - } -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, attr_name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(attr_name)); -#endif - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if 
(unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* PyObjectGetAttrStrNoError */ -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -} - -/* SetupReduce */ -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#else - if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#endif -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || 
__Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) - PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} - -/* SetVTable */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable) { -#if PY_VERSION_HEX >= 0x02070000 - PyObject *ob = PyCapsule_New(vtable, 0, 0); -#else - PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); -#endif - if (!ob) - goto bad; - if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType -#define __PYX_HAVE_RT_ImportType -static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, - size_t size, enum __Pyx_ImportType_CheckSize check_size) -{ - PyObject *result = 0; - char warning[200]; - Py_ssize_t basicsize; -#ifdef Py_LIMITED_API - PyObject *py_basicsize; -#endif - result = PyObject_GetAttrString(module, class_name); - if (!result) - goto bad; - if (!PyType_Check(result)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%.200s is not a type object", - module_name, class_name); - goto bad; - } -#ifndef Py_LIMITED_API - basicsize = ((PyTypeObject *)result)->tp_basicsize; -#else - py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); - if (!py_basicsize) - goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; -#endif - if ((size_t)basicsize < size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. 
" - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { - PyOS_snprintf(warning, sizeof(warning), - "%s.%s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; - } - return (PyTypeObject *)result; -bad: - Py_XDECREF(result); - return NULL; -} -#endif - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = 
__pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(int) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(int) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(int) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(int), - little, !is_unsigned); - } -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value) { - const unsigned int neg_one = (unsigned int) ((unsigned int) 0 - (unsigned int) 1), const_zero = (unsigned int) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(unsigned int) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(unsigned int) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(unsigned int) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned int) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(unsigned int), - little, !is_unsigned); - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_short(unsigned short value) { - const unsigned short neg_one = (unsigned short) ((unsigned short) 0 - (unsigned short) 1), const_zero = (unsigned short) 0; - const int is_unsigned = neg_one > const_zero; - if 
(is_unsigned) { - if (sizeof(unsigned short) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(unsigned short) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned short) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(unsigned short) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned short) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(unsigned short), - little, !is_unsigned); - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) { - const uint16_t neg_one = (uint16_t) ((uint16_t) 0 - (uint16_t) 1), const_zero = (uint16_t) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(uint16_t) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(uint16_t) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(uint16_t) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(uint16_t), - little, !is_unsigned); - } -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = 
((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << 
PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE enum http_method __Pyx_PyInt_As_enum__http_method(PyObject *x) { - const enum http_method neg_one = (enum http_method) ((enum http_method) 0 - (enum http_method) 1), const_zero = (enum http_method) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(enum http_method) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(enum http_method, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if 
(is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (enum http_method) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (enum http_method) 0; - case 1: __PYX_VERIFY_RETURN_INT(enum http_method, digit, digits[0]) - case 2: - if (8 * sizeof(enum http_method) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) >= 2 * PyLong_SHIFT) { - return (enum http_method) (((((enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(enum http_method) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) >= 3 * PyLong_SHIFT) { - return (enum http_method) (((((((enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(enum http_method) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) >= 4 * PyLong_SHIFT) { - return (enum http_method) (((((((((enum http_method)digits[3]) << PyLong_SHIFT) | (enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (enum http_method) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(enum http_method) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(enum http_method, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(enum http_method) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(enum http_method, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (enum http_method) 0; - case -1: __PYX_VERIFY_RETURN_INT(enum http_method, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(enum http_method, digit, +digits[0]) - case -2: - if (8 * sizeof(enum http_method) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) - 1 > 2 * PyLong_SHIFT) { - return (enum http_method) (((enum http_method)-1)*(((((enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(enum 
http_method) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) - 1 > 2 * PyLong_SHIFT) { - return (enum http_method) ((((((enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(enum http_method) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) - 1 > 3 * PyLong_SHIFT) { - return (enum http_method) (((enum http_method)-1)*(((((((enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(enum http_method) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) - 1 > 3 * PyLong_SHIFT) { - return (enum http_method) ((((((((enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(enum http_method) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) - 1 > 4 * PyLong_SHIFT) { - return (enum http_method) (((enum http_method)-1)*(((((((((enum http_method)digits[3]) << PyLong_SHIFT) | (enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(enum http_method) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(enum http_method, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(enum http_method) - 1 > 4 * PyLong_SHIFT) { - return (enum http_method) ((((((((((enum http_method)digits[3]) << PyLong_SHIFT) | (enum http_method)digits[2]) << PyLong_SHIFT) | (enum http_method)digits[1]) << PyLong_SHIFT) | (enum http_method)digits[0]))); - } - } - break; - } -#endif - if (sizeof(enum http_method) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(enum http_method, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(enum http_method) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(enum http_method, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - enum http_method val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = 
PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (enum http_method) -1; - } - } else { - enum http_method val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (enum http_method) -1; - val = __Pyx_PyInt_As_enum__http_method(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to enum http_method"); - return (enum http_method) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to enum http_method"); - return (enum http_method) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *x) { - const size_t neg_one = (size_t) ((size_t) 0 - (size_t) 1), const_zero = (size_t) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(size_t) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(size_t, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (size_t) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (size_t) 0; - case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, digits[0]) - case 2: - if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) >= 2 * PyLong_SHIFT) { - return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) >= 3 * PyLong_SHIFT) { - return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) >= 4 * PyLong_SHIFT) { - return (size_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (size_t) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(size_t) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else 
if (sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (size_t) 0; - case -1: __PYX_VERIFY_RETURN_INT(size_t, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, +digits[0]) - case -2: - if (8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { - return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { - return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { - return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { - return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { - return (size_t) (((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { - return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); - } - } - break; - } -#endif - if (sizeof(size_t) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if 
(sizeof(size_t) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - size_t val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (size_t) -1; - } - } else { - size_t val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (size_t) -1; - val = __Pyx_PyInt_As_size_t(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to size_t"); - return (size_t) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to size_t"); - return (size_t) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int 
result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | 
(long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; itp_name); - if (cached_type) { - if (!PyType_Check((PyObject*)cached_type)) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s is not a type object", - type->tp_name); - goto bad; - } - if (cached_type->tp_basicsize != type->tp_basicsize) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s has the wrong size, try recompiling", - type->tp_name); - goto bad; - } - } else { - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - if (PyType_Ready(type) < 0) goto bad; - if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0) - goto bad; - Py_INCREF(type); - cached_type = type; - } -done: - Py_DECREF(fake_module); - return cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else { - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (descr != NULL) { - *method = descr; - return 0; - } - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(name)); -#endif - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - 
Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod1 */ -static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) { - PyObject *result = __Pyx_PyObject_CallOneArg(method, arg); - Py_DECREF(method); - return result; -} -static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { - PyObject *method = NULL, *result; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_Call2Args(method, obj, arg); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) return NULL; - return __Pyx__PyObject_CallMethod1(method, arg); -} - -/* CoroutineBase */ -#include -#include -#define __Pyx_Coroutine_Undelegate(gen) Py_CLEAR((gen)->yieldfrom) -static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject **pvalue) { - PyObject *et, *ev, *tb; - PyObject *value = NULL; - __Pyx_ErrFetch(&et, &ev, &tb); - if (!et) { - Py_XDECREF(tb); - Py_XDECREF(ev); - Py_INCREF(Py_None); - *pvalue = Py_None; - return 0; - } - if (likely(et == PyExc_StopIteration)) { - if (!ev) { - Py_INCREF(Py_None); - value = Py_None; - } -#if PY_VERSION_HEX >= 0x030300A0 - else if (Py_TYPE(ev) == (PyTypeObject*)PyExc_StopIteration) { - value = ((PyStopIterationObject *)ev)->value; - Py_INCREF(value); - Py_DECREF(ev); - } -#endif - else if (unlikely(PyTuple_Check(ev))) { - if (PyTuple_GET_SIZE(ev) >= 1) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - value = PyTuple_GET_ITEM(ev, 0); - Py_INCREF(value); -#else - value = PySequence_ITEM(ev, 0); -#endif - } else { - Py_INCREF(Py_None); - value = Py_None; - } - Py_DECREF(ev); - } - else if (!__Pyx_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) { - value = ev; - } - if (likely(value)) { - Py_XDECREF(tb); - Py_DECREF(et); - *pvalue = value; - return 0; - } - } else if (!__Pyx_PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) { - __Pyx_ErrRestore(et, ev, tb); - return -1; - } - PyErr_NormalizeException(&et, &ev, &tb); - if (unlikely(!PyObject_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration))) { - __Pyx_ErrRestore(et, ev, tb); - return -1; - } - Py_XDECREF(tb); - Py_DECREF(et); -#if PY_VERSION_HEX >= 0x030300A0 - value = ((PyStopIterationObject *)ev)->value; - Py_INCREF(value); - Py_DECREF(ev); -#else - { - PyObject* args = __Pyx_PyObject_GetAttrStr(ev, __pyx_n_s_args); - Py_DECREF(ev); - if (likely(args)) { - value = PySequence_GetItem(args, 0); - Py_DECREF(args); - } - if (unlikely(!value)) { - __Pyx_ErrRestore(NULL, NULL, NULL); - Py_INCREF(Py_None); - value = Py_None; - } - } -#endif - *pvalue = value; - return 0; -} -static CYTHON_INLINE -void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *exc_state) { - PyObject *t, *v, *tb; - t = exc_state->exc_type; - v = exc_state->exc_value; - tb = exc_state->exc_traceback; - exc_state->exc_type = NULL; - exc_state->exc_value = NULL; - exc_state->exc_traceback = NULL; - Py_XDECREF(t); - Py_XDECREF(v); - Py_XDECREF(tb); -} -#define __Pyx_Coroutine_AlreadyRunningError(gen) (__Pyx__Coroutine_AlreadyRunningError(gen), (PyObject*)NULL) -static void __Pyx__Coroutine_AlreadyRunningError(CYTHON_UNUSED __pyx_CoroutineObject *gen) { - const char *msg; - if ((0)) { - #ifdef __Pyx_Coroutine_USED - } else if (__Pyx_Coroutine_Check((PyObject*)gen)) { - msg = "coroutine already executing"; - #endif - #ifdef __Pyx_AsyncGen_USED - } else if 
(__Pyx_AsyncGen_CheckExact((PyObject*)gen)) { - msg = "async generator already executing"; - #endif - } else { - msg = "generator already executing"; - } - PyErr_SetString(PyExc_ValueError, msg); -} -#define __Pyx_Coroutine_NotStartedError(gen) (__Pyx__Coroutine_NotStartedError(gen), (PyObject*)NULL) -static void __Pyx__Coroutine_NotStartedError(CYTHON_UNUSED PyObject *gen) { - const char *msg; - if ((0)) { - #ifdef __Pyx_Coroutine_USED - } else if (__Pyx_Coroutine_Check(gen)) { - msg = "can't send non-None value to a just-started coroutine"; - #endif - #ifdef __Pyx_AsyncGen_USED - } else if (__Pyx_AsyncGen_CheckExact(gen)) { - msg = "can't send non-None value to a just-started async generator"; - #endif - } else { - msg = "can't send non-None value to a just-started generator"; - } - PyErr_SetString(PyExc_TypeError, msg); -} -#define __Pyx_Coroutine_AlreadyTerminatedError(gen, value, closing) (__Pyx__Coroutine_AlreadyTerminatedError(gen, value, closing), (PyObject*)NULL) -static void __Pyx__Coroutine_AlreadyTerminatedError(CYTHON_UNUSED PyObject *gen, PyObject *value, CYTHON_UNUSED int closing) { - #ifdef __Pyx_Coroutine_USED - if (!closing && __Pyx_Coroutine_Check(gen)) { - PyErr_SetString(PyExc_RuntimeError, "cannot reuse already awaited coroutine"); - } else - #endif - if (value) { - #ifdef __Pyx_AsyncGen_USED - if (__Pyx_AsyncGen_CheckExact(gen)) - PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); - else - #endif - PyErr_SetNone(PyExc_StopIteration); - } -} -static -PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, int closing) { - __Pyx_PyThreadState_declare - PyThreadState *tstate; - __Pyx_ExcInfoStruct *exc_state; - PyObject *retval; - assert(!self->is_running); - if (unlikely(self->resume_label == 0)) { - if (unlikely(value && value != Py_None)) { - return __Pyx_Coroutine_NotStartedError((PyObject*)self); - } - } - if (unlikely(self->resume_label == -1)) { - return __Pyx_Coroutine_AlreadyTerminatedError((PyObject*)self, value, closing); - } -#if CYTHON_FAST_THREAD_STATE - __Pyx_PyThreadState_assign - tstate = __pyx_tstate; -#else - tstate = __Pyx_PyThreadState_Current; -#endif - exc_state = &self->gi_exc_state; - if (exc_state->exc_type) { - #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON - #else - if (exc_state->exc_traceback) { - PyTracebackObject *tb = (PyTracebackObject *) exc_state->exc_traceback; - PyFrameObject *f = tb->tb_frame; - Py_XINCREF(tstate->frame); - assert(f->f_back == NULL); - f->f_back = tstate->frame; - } - #endif - } -#if CYTHON_USE_EXC_INFO_STACK - exc_state->previous_item = tstate->exc_info; - tstate->exc_info = exc_state; -#else - if (exc_state->exc_type) { - __Pyx_ExceptionSwap(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback); - } else { - __Pyx_Coroutine_ExceptionClear(exc_state); - __Pyx_ExceptionSave(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback); - } -#endif - self->is_running = 1; - retval = self->body((PyObject *) self, tstate, value); - self->is_running = 0; -#if CYTHON_USE_EXC_INFO_STACK - exc_state = &self->gi_exc_state; - tstate->exc_info = exc_state->previous_item; - exc_state->previous_item = NULL; - __Pyx_Coroutine_ResetFrameBackpointer(exc_state); -#endif - return retval; -} -static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state) { - PyObject *exc_tb = exc_state->exc_traceback; - if (likely(exc_tb)) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON -#else - PyTracebackObject *tb = (PyTracebackObject *) 
exc_tb; - PyFrameObject *f = tb->tb_frame; - Py_CLEAR(f->f_back); -#endif - } -} -static CYTHON_INLINE -PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *retval) { - if (unlikely(!retval)) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (!__Pyx_PyErr_Occurred()) { - PyObject *exc = PyExc_StopIteration; - #ifdef __Pyx_AsyncGen_USED - if (__Pyx_AsyncGen_CheckExact(gen)) - exc = __Pyx_PyExc_StopAsyncIteration; - #endif - __Pyx_PyErr_SetNone(exc); - } - } - return retval; -} -static CYTHON_INLINE -PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) { - PyObject *ret; - PyObject *val = NULL; - __Pyx_Coroutine_Undelegate(gen); - __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, &val); - ret = __Pyx_Coroutine_SendEx(gen, val, 0); - Py_XDECREF(val); - return ret; -} -static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) { - PyObject *retval; - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; - PyObject *yf = gen->yieldfrom; - if (unlikely(gen->is_running)) - return __Pyx_Coroutine_AlreadyRunningError(gen); - if (yf) { - PyObject *ret; - gen->is_running = 1; - #ifdef __Pyx_Generator_USED - if (__Pyx_Generator_CheckExact(yf)) { - ret = __Pyx_Coroutine_Send(yf, value); - } else - #endif - #ifdef __Pyx_Coroutine_USED - if (__Pyx_Coroutine_Check(yf)) { - ret = __Pyx_Coroutine_Send(yf, value); - } else - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_PyAsyncGenASend_CheckExact(yf)) { - ret = __Pyx_async_gen_asend_send(yf, value); - } else - #endif - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) - if (PyGen_CheckExact(yf)) { - ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); - } else - #endif - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) - if (PyCoro_CheckExact(yf)) { - ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? 
NULL : value); - } else - #endif - { - if (value == Py_None) - ret = Py_TYPE(yf)->tp_iternext(yf); - else - ret = __Pyx_PyObject_CallMethod1(yf, __pyx_n_s_send, value); - } - gen->is_running = 0; - if (likely(ret)) { - return ret; - } - retval = __Pyx_Coroutine_FinishDelegation(gen); - } else { - retval = __Pyx_Coroutine_SendEx(gen, value, 0); - } - return __Pyx_Coroutine_MethodReturn(self, retval); -} -static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) { - PyObject *retval = NULL; - int err = 0; - #ifdef __Pyx_Generator_USED - if (__Pyx_Generator_CheckExact(yf)) { - retval = __Pyx_Coroutine_Close(yf); - if (!retval) - return -1; - } else - #endif - #ifdef __Pyx_Coroutine_USED - if (__Pyx_Coroutine_Check(yf)) { - retval = __Pyx_Coroutine_Close(yf); - if (!retval) - return -1; - } else - if (__Pyx_CoroutineAwait_CheckExact(yf)) { - retval = __Pyx_CoroutineAwait_Close((__pyx_CoroutineAwaitObject*)yf, NULL); - if (!retval) - return -1; - } else - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_PyAsyncGenASend_CheckExact(yf)) { - retval = __Pyx_async_gen_asend_close(yf, NULL); - } else - if (__pyx_PyAsyncGenAThrow_CheckExact(yf)) { - retval = __Pyx_async_gen_athrow_close(yf, NULL); - } else - #endif - { - PyObject *meth; - gen->is_running = 1; - meth = __Pyx_PyObject_GetAttrStr(yf, __pyx_n_s_close); - if (unlikely(!meth)) { - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_WriteUnraisable(yf); - } - PyErr_Clear(); - } else { - retval = PyObject_CallFunction(meth, NULL); - Py_DECREF(meth); - if (!retval) - err = -1; - } - gen->is_running = 0; - } - Py_XDECREF(retval); - return err; -} -static PyObject *__Pyx_Generator_Next(PyObject *self) { - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; - PyObject *yf = gen->yieldfrom; - if (unlikely(gen->is_running)) - return __Pyx_Coroutine_AlreadyRunningError(gen); - if (yf) { - PyObject *ret; - gen->is_running = 1; - #ifdef __Pyx_Generator_USED - if (__Pyx_Generator_CheckExact(yf)) { - ret = __Pyx_Generator_Next(yf); - } else - #endif - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) - if (PyGen_CheckExact(yf)) { - ret = _PyGen_Send((PyGenObject*)yf, NULL); - } else - #endif - #ifdef __Pyx_Coroutine_USED - if (__Pyx_Coroutine_Check(yf)) { - ret = __Pyx_Coroutine_Send(yf, Py_None); - } else - #endif - ret = Py_TYPE(yf)->tp_iternext(yf); - gen->is_running = 0; - if (likely(ret)) { - return ret; - } - return __Pyx_Coroutine_FinishDelegation(gen); - } - return __Pyx_Coroutine_SendEx(gen, Py_None, 0); -} -static PyObject *__Pyx_Coroutine_Close_Method(PyObject *self, CYTHON_UNUSED PyObject *arg) { - return __Pyx_Coroutine_Close(self); -} -static PyObject *__Pyx_Coroutine_Close(PyObject *self) { - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; - PyObject *retval, *raised_exception; - PyObject *yf = gen->yieldfrom; - int err = 0; - if (unlikely(gen->is_running)) - return __Pyx_Coroutine_AlreadyRunningError(gen); - if (yf) { - Py_INCREF(yf); - err = __Pyx_Coroutine_CloseIter(gen, yf); - __Pyx_Coroutine_Undelegate(gen); - Py_DECREF(yf); - } - if (err == 0) - PyErr_SetNone(PyExc_GeneratorExit); - retval = __Pyx_Coroutine_SendEx(gen, NULL, 1); - if (unlikely(retval)) { - const char *msg; - Py_DECREF(retval); - if ((0)) { - #ifdef __Pyx_Coroutine_USED - } else if (__Pyx_Coroutine_Check(self)) { - msg = "coroutine ignored GeneratorExit"; - #endif - #ifdef __Pyx_AsyncGen_USED - } else if (__Pyx_AsyncGen_CheckExact(self)) { -#if 
PY_VERSION_HEX < 0x03060000 - msg = "async generator ignored GeneratorExit - might require Python 3.6+ finalisation (PEP 525)"; -#else - msg = "async generator ignored GeneratorExit"; -#endif - #endif - } else { - msg = "generator ignored GeneratorExit"; - } - PyErr_SetString(PyExc_RuntimeError, msg); - return NULL; - } - raised_exception = PyErr_Occurred(); - if (likely(!raised_exception || __Pyx_PyErr_GivenExceptionMatches2(raised_exception, PyExc_GeneratorExit, PyExc_StopIteration))) { - if (raised_exception) PyErr_Clear(); - Py_INCREF(Py_None); - return Py_None; - } - return NULL; -} -static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject *val, PyObject *tb, - PyObject *args, int close_on_genexit) { - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; - PyObject *yf = gen->yieldfrom; - if (unlikely(gen->is_running)) - return __Pyx_Coroutine_AlreadyRunningError(gen); - if (yf) { - PyObject *ret; - Py_INCREF(yf); - if (__Pyx_PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit) && close_on_genexit) { - int err = __Pyx_Coroutine_CloseIter(gen, yf); - Py_DECREF(yf); - __Pyx_Coroutine_Undelegate(gen); - if (err < 0) - return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); - goto throw_here; - } - gen->is_running = 1; - if (0 - #ifdef __Pyx_Generator_USED - || __Pyx_Generator_CheckExact(yf) - #endif - #ifdef __Pyx_Coroutine_USED - || __Pyx_Coroutine_Check(yf) - #endif - ) { - ret = __Pyx__Coroutine_Throw(yf, typ, val, tb, args, close_on_genexit); - #ifdef __Pyx_Coroutine_USED - } else if (__Pyx_CoroutineAwait_CheckExact(yf)) { - ret = __Pyx__Coroutine_Throw(((__pyx_CoroutineAwaitObject*)yf)->coroutine, typ, val, tb, args, close_on_genexit); - #endif - } else { - PyObject *meth = __Pyx_PyObject_GetAttrStr(yf, __pyx_n_s_throw); - if (unlikely(!meth)) { - Py_DECREF(yf); - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { - gen->is_running = 0; - return NULL; - } - PyErr_Clear(); - __Pyx_Coroutine_Undelegate(gen); - gen->is_running = 0; - goto throw_here; - } - if (likely(args)) { - ret = PyObject_CallObject(meth, args); - } else { - ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL); - } - Py_DECREF(meth); - } - gen->is_running = 0; - Py_DECREF(yf); - if (!ret) { - ret = __Pyx_Coroutine_FinishDelegation(gen); - } - return __Pyx_Coroutine_MethodReturn(self, ret); - } -throw_here: - __Pyx_Raise(typ, val, tb, NULL); - return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); -} -static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) { - PyObject *typ; - PyObject *val = NULL; - PyObject *tb = NULL; - if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb)) - return NULL; - return __Pyx__Coroutine_Throw(self, typ, val, tb, args, 1); -} -static CYTHON_INLINE int __Pyx_Coroutine_traverse_excstate(__Pyx_ExcInfoStruct *exc_state, visitproc visit, void *arg) { - Py_VISIT(exc_state->exc_type); - Py_VISIT(exc_state->exc_value); - Py_VISIT(exc_state->exc_traceback); - return 0; -} -static int __Pyx_Coroutine_traverse(__pyx_CoroutineObject *gen, visitproc visit, void *arg) { - Py_VISIT(gen->closure); - Py_VISIT(gen->classobj); - Py_VISIT(gen->yieldfrom); - return __Pyx_Coroutine_traverse_excstate(&gen->gi_exc_state, visit, arg); -} -static int __Pyx_Coroutine_clear(PyObject *self) { - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; - Py_CLEAR(gen->closure); - Py_CLEAR(gen->classobj); - Py_CLEAR(gen->yieldfrom); - __Pyx_Coroutine_ExceptionClear(&gen->gi_exc_state); -#ifdef 
__Pyx_AsyncGen_USED - if (__Pyx_AsyncGen_CheckExact(self)) { - Py_CLEAR(((__pyx_PyAsyncGenObject*)gen)->ag_finalizer); - } -#endif - Py_CLEAR(gen->gi_code); - Py_CLEAR(gen->gi_name); - Py_CLEAR(gen->gi_qualname); - Py_CLEAR(gen->gi_modulename); - return 0; -} -static void __Pyx_Coroutine_dealloc(PyObject *self) { - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; - PyObject_GC_UnTrack(gen); - if (gen->gi_weakreflist != NULL) - PyObject_ClearWeakRefs(self); - if (gen->resume_label >= 0) { - PyObject_GC_Track(self); -#if PY_VERSION_HEX >= 0x030400a1 && CYTHON_USE_TP_FINALIZE - if (PyObject_CallFinalizerFromDealloc(self)) -#else - Py_TYPE(gen)->tp_del(self); - if (self->ob_refcnt > 0) -#endif - { - return; - } - PyObject_GC_UnTrack(self); - } -#ifdef __Pyx_AsyncGen_USED - if (__Pyx_AsyncGen_CheckExact(self)) { - /* We have to handle this case for asynchronous generators - right here, because this code has to be between UNTRACK - and GC_Del. */ - Py_CLEAR(((__pyx_PyAsyncGenObject*)self)->ag_finalizer); - } -#endif - __Pyx_Coroutine_clear(self); - PyObject_GC_Del(gen); -} -static void __Pyx_Coroutine_del(PyObject *self) { - PyObject *error_type, *error_value, *error_traceback; - __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; - __Pyx_PyThreadState_declare - if (gen->resume_label < 0) { - return; - } -#if !CYTHON_USE_TP_FINALIZE - assert(self->ob_refcnt == 0); - self->ob_refcnt = 1; -#endif - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&error_type, &error_value, &error_traceback); -#ifdef __Pyx_AsyncGen_USED - if (__Pyx_AsyncGen_CheckExact(self)) { - __pyx_PyAsyncGenObject *agen = (__pyx_PyAsyncGenObject*)self; - PyObject *finalizer = agen->ag_finalizer; - if (finalizer && !agen->ag_closed) { - PyObject *res = __Pyx_PyObject_CallOneArg(finalizer, self); - if (unlikely(!res)) { - PyErr_WriteUnraisable(self); - } else { - Py_DECREF(res); - } - __Pyx_ErrRestore(error_type, error_value, error_traceback); - return; - } - } -#endif - if (unlikely(gen->resume_label == 0 && !error_value)) { -#ifdef __Pyx_Coroutine_USED -#ifdef __Pyx_Generator_USED - if (!__Pyx_Generator_CheckExact(self)) -#endif - { - PyObject_GC_UnTrack(self); -#if PY_MAJOR_VERSION >= 3 || defined(PyErr_WarnFormat) - if (unlikely(PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname) < 0)) - PyErr_WriteUnraisable(self); -#else - {PyObject *msg; - char *cmsg; - #if CYTHON_COMPILING_IN_PYPY - msg = NULL; - cmsg = (char*) "coroutine was never awaited"; - #else - char *cname; - PyObject *qualname; - qualname = gen->gi_qualname; - cname = PyString_AS_STRING(qualname); - msg = PyString_FromFormat("coroutine '%.50s' was never awaited", cname); - if (unlikely(!msg)) { - PyErr_Clear(); - cmsg = (char*) "coroutine was never awaited"; - } else { - cmsg = PyString_AS_STRING(msg); - } - #endif - if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0)) - PyErr_WriteUnraisable(self); - Py_XDECREF(msg);} -#endif - PyObject_GC_Track(self); - } -#endif - } else { - PyObject *res = __Pyx_Coroutine_Close(self); - if (unlikely(!res)) { - if (PyErr_Occurred()) - PyErr_WriteUnraisable(self); - } else { - Py_DECREF(res); - } - } - __Pyx_ErrRestore(error_type, error_value, error_traceback); -#if !CYTHON_USE_TP_FINALIZE - assert(self->ob_refcnt > 0); - if (--self->ob_refcnt == 0) { - return; - } - { - Py_ssize_t refcnt = self->ob_refcnt; - _Py_NewReference(self); - self->ob_refcnt = refcnt; - } -#if CYTHON_COMPILING_IN_CPYTHON - assert(PyType_IS_GC(self->ob_type) && - 
_Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); - _Py_DEC_REFTOTAL; -#endif -#ifdef COUNT_ALLOCS - --Py_TYPE(self)->tp_frees; - --Py_TYPE(self)->tp_allocs; -#endif -#endif -} -static PyObject * -__Pyx_Coroutine_get_name(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) -{ - PyObject *name = self->gi_name; - if (unlikely(!name)) name = Py_None; - Py_INCREF(name); - return name; -} -static int -__Pyx_Coroutine_set_name(__pyx_CoroutineObject *self, PyObject *value, CYTHON_UNUSED void *context) -{ - PyObject *tmp; -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__name__ must be set to a string object"); - return -1; - } - tmp = self->gi_name; - Py_INCREF(value); - self->gi_name = value; - Py_XDECREF(tmp); - return 0; -} -static PyObject * -__Pyx_Coroutine_get_qualname(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) -{ - PyObject *name = self->gi_qualname; - if (unlikely(!name)) name = Py_None; - Py_INCREF(name); - return name; -} -static int -__Pyx_Coroutine_set_qualname(__pyx_CoroutineObject *self, PyObject *value, CYTHON_UNUSED void *context) -{ - PyObject *tmp; -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__qualname__ must be set to a string object"); - return -1; - } - tmp = self->gi_qualname; - Py_INCREF(value); - self->gi_qualname = value; - Py_XDECREF(tmp); - return 0; -} -static __pyx_CoroutineObject *__Pyx__Coroutine_New( - PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, - PyObject *name, PyObject *qualname, PyObject *module_name) { - __pyx_CoroutineObject *gen = PyObject_GC_New(__pyx_CoroutineObject, type); - if (unlikely(!gen)) - return NULL; - return __Pyx__Coroutine_NewInit(gen, body, code, closure, name, qualname, module_name); -} -static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( - __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, - PyObject *name, PyObject *qualname, PyObject *module_name) { - gen->body = body; - gen->closure = closure; - Py_XINCREF(closure); - gen->is_running = 0; - gen->resume_label = 0; - gen->classobj = NULL; - gen->yieldfrom = NULL; - gen->gi_exc_state.exc_type = NULL; - gen->gi_exc_state.exc_value = NULL; - gen->gi_exc_state.exc_traceback = NULL; -#if CYTHON_USE_EXC_INFO_STACK - gen->gi_exc_state.previous_item = NULL; -#endif - gen->gi_weakreflist = NULL; - Py_XINCREF(qualname); - gen->gi_qualname = qualname; - Py_XINCREF(name); - gen->gi_name = name; - Py_XINCREF(module_name); - gen->gi_modulename = module_name; - Py_XINCREF(code); - gen->gi_code = code; - PyObject_GC_Track(gen); - return gen; -} - -/* PatchModuleWithCoroutine */ -static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) { -#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - int result; - PyObject *globals, *result_obj; - globals = PyDict_New(); if (unlikely(!globals)) goto ignore; - result = PyDict_SetItemString(globals, "_cython_coroutine_type", - #ifdef __Pyx_Coroutine_USED - (PyObject*)__pyx_CoroutineType); - #else - Py_None); - #endif - if (unlikely(result < 0)) goto ignore; - result = PyDict_SetItemString(globals, "_cython_generator_type", - #ifdef __Pyx_Generator_USED - (PyObject*)__pyx_GeneratorType); - #else - Py_None); - #endif - 
if (unlikely(result < 0)) goto ignore; - if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore; - if (unlikely(PyDict_SetItemString(globals, "__builtins__", __pyx_b) < 0)) goto ignore; - result_obj = PyRun_String(py_code, Py_file_input, globals, globals); - if (unlikely(!result_obj)) goto ignore; - Py_DECREF(result_obj); - Py_DECREF(globals); - return module; -ignore: - Py_XDECREF(globals); - PyErr_WriteUnraisable(module); - if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch module with custom type", 1) < 0)) { - Py_DECREF(module); - module = NULL; - } -#else - py_code++; -#endif - return module; -} - -/* PatchGeneratorABC */ -#ifndef CYTHON_REGISTER_ABCS -#define CYTHON_REGISTER_ABCS 1 -#endif -#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) -static PyObject* __Pyx_patch_abc_module(PyObject *module); -static PyObject* __Pyx_patch_abc_module(PyObject *module) { - module = __Pyx_Coroutine_patch_module( - module, "" -"if _cython_generator_type is not None:\n" -" try: Generator = _module.Generator\n" -" except AttributeError: pass\n" -" else: Generator.register(_cython_generator_type)\n" -"if _cython_coroutine_type is not None:\n" -" try: Coroutine = _module.Coroutine\n" -" except AttributeError: pass\n" -" else: Coroutine.register(_cython_coroutine_type)\n" - ); - return module; -} -#endif -static int __Pyx_patch_abc(void) { -#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - static int abc_patched = 0; - if (CYTHON_REGISTER_ABCS && !abc_patched) { - PyObject *module; - module = PyImport_ImportModule((PY_MAJOR_VERSION >= 3) ? "collections.abc" : "collections"); - if (!module) { - PyErr_WriteUnraisable(NULL); - if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, - ((PY_MAJOR_VERSION >= 3) ? 
- "Cython module failed to register with collections.abc module" : - "Cython module failed to register with collections module"), 1) < 0)) { - return -1; - } - } else { - module = __Pyx_patch_abc_module(module); - abc_patched = 1; - if (unlikely(!module)) - return -1; - Py_DECREF(module); - } - module = PyImport_ImportModule("backports_abc"); - if (module) { - module = __Pyx_patch_abc_module(module); - Py_XDECREF(module); - } - if (!module) { - PyErr_Clear(); - } - } -#else - if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL); -#endif - return 0; -} - -/* Generator */ -static PyMethodDef __pyx_Generator_methods[] = { - {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, - (char*) PyDoc_STR("send(arg) -> send 'arg' into generator,\nreturn next yielded value or raise StopIteration.")}, - {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, - (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in generator,\nreturn next yielded value or raise StopIteration.")}, - {"close", (PyCFunction) __Pyx_Coroutine_Close_Method, METH_NOARGS, - (char*) PyDoc_STR("close() -> raise GeneratorExit inside generator.")}, - {0, 0, 0, 0} -}; -static PyMemberDef __pyx_Generator_memberlist[] = { - {(char *) "gi_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, - {(char*) "gi_yieldfrom", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, - (char*) PyDoc_STR("object being iterated by 'yield from', or None")}, - {(char*) "gi_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL}, - {0, 0, 0, 0, 0} -}; -static PyGetSetDef __pyx_Generator_getsets[] = { - {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, - (char*) PyDoc_STR("name of the generator"), 0}, - {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, - (char*) PyDoc_STR("qualified name of the generator"), 0}, - {0, 0, 0, 0, 0} -}; -static PyTypeObject __pyx_GeneratorType_type = { - PyVarObject_HEAD_INIT(0, 0) - "generator", - sizeof(__pyx_CoroutineObject), - 0, - (destructor) __Pyx_Coroutine_dealloc, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, - 0, - (traverseproc) __Pyx_Coroutine_traverse, - 0, - 0, - offsetof(__pyx_CoroutineObject, gi_weakreflist), - 0, - (iternextfunc) __Pyx_Generator_Next, - __pyx_Generator_methods, - __pyx_Generator_memberlist, - __pyx_Generator_getsets, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if CYTHON_USE_TP_FINALIZE - 0, -#else - __Pyx_Coroutine_del, -#endif - 0, -#if CYTHON_USE_TP_FINALIZE - __Pyx_Coroutine_del, -#elif PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, -#endif -}; -static int __pyx_Generator_init(void) { - __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; - __pyx_GeneratorType_type.tp_iter = PyObject_SelfIter; - __pyx_GeneratorType = __Pyx_FetchCommonType(&__pyx_GeneratorType_type); - if (unlikely(!__pyx_GeneratorType)) { - return -1; - } - return 0; -} - -/* CheckBinaryVersion */ -static int __Pyx_check_binary_version(void) { - char ctversion[4], rtversion[4]; - PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); - PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); - if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { - char message[200]; - 
PyOS_snprintf(message, sizeof(message), - "compiletime version %s of module '%.100s' " - "does not match runtime version %s", - ctversion, __Pyx_MODULE_NAME, rtversion); - return PyErr_WarnEx(NULL, message, 1); - } - return 0; -} - -/* InitStrings */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = 
__Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? 
digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/third_party/python/aiohttp/aiohttp/_http_parser.pyx b/third_party/python/aiohttp/aiohttp/_http_parser.pyx deleted file mode 100644 index c24e31057a88..000000000000 --- a/third_party/python/aiohttp/aiohttp/_http_parser.pyx +++ /dev/null @@ -1,875 +0,0 @@ -#cython: language_level=3 -# -# Based on https://github.com/MagicStack/httptools -# -from __future__ import absolute_import, print_function - -from cpython cimport ( - Py_buffer, - PyBUF_SIMPLE, - PyBuffer_Release, - PyBytes_AsString, - PyBytes_AsStringAndSize, - PyObject_GetBuffer, -) -from cpython.mem cimport PyMem_Free, PyMem_Malloc -from libc.limits cimport ULLONG_MAX -from libc.string cimport memcpy - -from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy -from yarl import URL as _URL - -from aiohttp import hdrs - -from .http_exceptions import ( - BadHttpMessage, - BadStatusLine, - ContentLengthError, - InvalidHeader, - InvalidURLError, - LineTooLong, - PayloadEncodingError, - TransferEncodingError, -) -from .http_parser import DeflateBuffer as _DeflateBuffer -from .http_writer import ( - HttpVersion as _HttpVersion, - HttpVersion10 as _HttpVersion10, - HttpVersion11 as _HttpVersion11, -) -from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader - -cimport cython - -from aiohttp cimport _cparser as cparser - -include "_headers.pxi" - -from aiohttp cimport _find_header - -DEF DEFAULT_FREELIST_SIZE = 250 - -cdef extern from "Python.h": - int PyByteArray_Resize(object, Py_ssize_t) except -1 - Py_ssize_t PyByteArray_Size(object) except -1 - char* PyByteArray_AsString(object) - -__all__ = ('HttpRequestParser', 'HttpResponseParser', - 'RawRequestMessage', 'RawResponseMessage') - -cdef object URL = _URL -cdef object URL_build = URL.build -cdef object CIMultiDict = _CIMultiDict -cdef 
object CIMultiDictProxy = _CIMultiDictProxy -cdef object HttpVersion = _HttpVersion -cdef object HttpVersion10 = _HttpVersion10 -cdef object HttpVersion11 = _HttpVersion11 -cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 -cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING -cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD -cdef object StreamReader = _StreamReader -cdef object DeflateBuffer = _DeflateBuffer - - -cdef inline object extend(object buf, const char* at, size_t length): - cdef Py_ssize_t s - cdef char* ptr - s = PyByteArray_Size(buf) - PyByteArray_Resize(buf, s + length) - ptr = PyByteArray_AsString(buf) - memcpy(ptr + s, at, length) - - -DEF METHODS_COUNT = 34; - -cdef list _http_method = [] - -for i in range(METHODS_COUNT): - _http_method.append( - cparser.http_method_str( i).decode('ascii')) - - -cdef inline str http_method_str(int i): - if i < METHODS_COUNT: - return _http_method[i] - else: - return "" - -cdef inline object find_header(bytes raw_header): - cdef Py_ssize_t size - cdef char *buf - cdef int idx - PyBytes_AsStringAndSize(raw_header, &buf, &size) - idx = _find_header.find_header(buf, size) - if idx == -1: - return raw_header.decode('utf-8', 'surrogateescape') - return headers[idx] - - -@cython.freelist(DEFAULT_FREELIST_SIZE) -cdef class RawRequestMessage: - cdef readonly str method - cdef readonly str path - cdef readonly object version # HttpVersion - cdef readonly object headers # CIMultiDict - cdef readonly object raw_headers # tuple - cdef readonly object should_close - cdef readonly object compression - cdef readonly object upgrade - cdef readonly object chunked - cdef readonly object url # yarl.URL - - def __init__(self, method, path, version, headers, raw_headers, - should_close, compression, upgrade, chunked, url): - self.method = method - self.path = path - self.version = version - self.headers = headers - self.raw_headers = raw_headers - self.should_close = should_close - self.compression = compression - self.upgrade = upgrade - self.chunked = chunked - self.url = url - - def __repr__(self): - info = [] - info.append(("method", self.method)) - info.append(("path", self.path)) - info.append(("version", self.version)) - info.append(("headers", self.headers)) - info.append(("raw_headers", self.raw_headers)) - info.append(("should_close", self.should_close)) - info.append(("compression", self.compression)) - info.append(("upgrade", self.upgrade)) - info.append(("chunked", self.chunked)) - info.append(("url", self.url)) - sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - return '' - - def _replace(self, **dct): - cdef RawRequestMessage ret - ret = _new_request_message(self.method, - self.path, - self.version, - self.headers, - self.raw_headers, - self.should_close, - self.compression, - self.upgrade, - self.chunked, - self.url) - if "method" in dct: - ret.method = dct["method"] - if "path" in dct: - ret.path = dct["path"] - if "version" in dct: - ret.version = dct["version"] - if "headers" in dct: - ret.headers = dct["headers"] - if "raw_headers" in dct: - ret.raw_headers = dct["raw_headers"] - if "should_close" in dct: - ret.should_close = dct["should_close"] - if "compression" in dct: - ret.compression = dct["compression"] - if "upgrade" in dct: - ret.upgrade = dct["upgrade"] - if "chunked" in dct: - ret.chunked = dct["chunked"] - if "url" in dct: - ret.url = dct["url"] - return ret - -cdef _new_request_message(str method, - str path, - object version, - object headers, - object raw_headers, - bint should_close, - object compression, - 
-                           bint upgrade,
-                           bint chunked,
-                           object url):
-    cdef RawRequestMessage ret
-    ret = RawRequestMessage.__new__(RawRequestMessage)
-    ret.method = method
-    ret.path = path
-    ret.version = version
-    ret.headers = headers
-    ret.raw_headers = raw_headers
-    ret.should_close = should_close
-    ret.compression = compression
-    ret.upgrade = upgrade
-    ret.chunked = chunked
-    ret.url = url
-    return ret
-
-
-@cython.freelist(DEFAULT_FREELIST_SIZE)
-cdef class RawResponseMessage:
-    cdef readonly object version  # HttpVersion
-    cdef readonly int code
-    cdef readonly str reason
-    cdef readonly object headers  # CIMultiDict
-    cdef readonly object raw_headers  # tuple
-    cdef readonly object should_close
-    cdef readonly object compression
-    cdef readonly object upgrade
-    cdef readonly object chunked
-
-    def __init__(self, version, code, reason, headers, raw_headers,
-                 should_close, compression, upgrade, chunked):
-        self.version = version
-        self.code = code
-        self.reason = reason
-        self.headers = headers
-        self.raw_headers = raw_headers
-        self.should_close = should_close
-        self.compression = compression
-        self.upgrade = upgrade
-        self.chunked = chunked
-
-    def __repr__(self):
-        info = []
-        info.append(("version", self.version))
-        info.append(("code", self.code))
-        info.append(("reason", self.reason))
-        info.append(("headers", self.headers))
-        info.append(("raw_headers", self.raw_headers))
-        info.append(("should_close", self.should_close))
-        info.append(("compression", self.compression))
-        info.append(("upgrade", self.upgrade))
-        info.append(("chunked", self.chunked))
-        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
-        return '<RawResponseMessage(' + sinfo + ')>'
-
-
-cdef _new_response_message(object version,
-                           int code,
-                           str reason,
-                           object headers,
-                           object raw_headers,
-                           bint should_close,
-                           object compression,
-                           bint upgrade,
-                           bint chunked):
-    cdef RawResponseMessage ret
-    ret = RawResponseMessage.__new__(RawResponseMessage)
-    ret.version = version
-    ret.code = code
-    ret.reason = reason
-    ret.headers = headers
-    ret.raw_headers = raw_headers
-    ret.should_close = should_close
-    ret.compression = compression
-    ret.upgrade = upgrade
-    ret.chunked = chunked
-    return ret
-
-
-@cython.internal
-cdef class HttpParser:
-
-    cdef:
-        cparser.http_parser* _cparser
-        cparser.http_parser_settings* _csettings
-
-        bytearray _raw_name
-        bytearray _raw_value
-        bint _has_value
-
-        object _protocol
-        object _loop
-        object _timer
-
-        size_t _max_line_size
-        size_t _max_field_size
-        size_t _max_headers
-        bint _response_with_body
-        bint _read_until_eof
-
-        bint _started
-        object _url
-        bytearray _buf
-        str _path
-        str _reason
-        object _headers
-        list _raw_headers
-        bint _upgraded
-        list _messages
-        object _payload
-        bint _payload_error
-        object _payload_exception
-        object _last_error
-        bint _auto_decompress
-        int _limit
-
-        str _content_encoding
-
-        Py_buffer py_buf
-
-    def __cinit__(self):
-        self._cparser = <cparser.http_parser*> \
-            PyMem_Malloc(sizeof(cparser.http_parser))
-        if self._cparser is NULL:
-            raise MemoryError()
-
-        self._csettings = <cparser.http_parser_settings*> \
-            PyMem_Malloc(sizeof(cparser.http_parser_settings))
-        if self._csettings is NULL:
-            raise MemoryError()
-
-    def __dealloc__(self):
-        PyMem_Free(self._cparser)
-        PyMem_Free(self._csettings)
-
-    cdef _init(self, cparser.http_parser_type mode,
-               object protocol, object loop, int limit,
-               object timer=None,
-               size_t max_line_size=8190, size_t max_headers=32768,
-               size_t max_field_size=8190, payload_exception=None,
-               bint response_with_body=True, bint read_until_eof=False,
-               bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode) - self._cparser.data = self - self._cparser.content_length = 0 - - cparser.http_parser_settings_init(self._csettings) - - self._protocol = protocol - self._loop = loop - self._timer = timer - - self._buf = bytearray() - self._payload = None - self._payload_error = 0 - self._payload_exception = payload_exception - self._messages = [] - - self._raw_name = bytearray() - self._raw_value = bytearray() - self._has_value = False - - self._max_line_size = max_line_size - self._max_headers = max_headers - self._max_field_size = max_field_size - self._response_with_body = response_with_body - self._read_until_eof = read_until_eof - self._upgraded = False - self._auto_decompress = auto_decompress - self._content_encoding = None - - self._csettings.on_url = cb_on_url - self._csettings.on_status = cb_on_status - self._csettings.on_header_field = cb_on_header_field - self._csettings.on_header_value = cb_on_header_value - self._csettings.on_headers_complete = cb_on_headers_complete - self._csettings.on_body = cb_on_body - self._csettings.on_message_begin = cb_on_message_begin - self._csettings.on_message_complete = cb_on_message_complete - self._csettings.on_chunk_header = cb_on_chunk_header - self._csettings.on_chunk_complete = cb_on_chunk_complete - - self._last_error = None - self._limit = limit - - cdef _process_header(self): - if self._raw_name: - raw_name = bytes(self._raw_name) - raw_value = bytes(self._raw_value) - - name = find_header(raw_name) - value = raw_value.decode('utf-8', 'surrogateescape') - - self._headers.add(name, value) - - if name is CONTENT_ENCODING: - self._content_encoding = value - - PyByteArray_Resize(self._raw_name, 0) - PyByteArray_Resize(self._raw_value, 0) - self._has_value = False - self._raw_headers.append((raw_name, raw_value)) - - cdef _on_header_field(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf - if self._has_value: - self._process_header() - - size = PyByteArray_Size(self._raw_name) - PyByteArray_Resize(self._raw_name, size + length) - buf = PyByteArray_AsString(self._raw_name) - memcpy(buf + size, at, length) - - cdef _on_header_value(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf - - size = PyByteArray_Size(self._raw_value) - PyByteArray_Resize(self._raw_value, size + length) - buf = PyByteArray_AsString(self._raw_value) - memcpy(buf + size, at, length) - self._has_value = True - - cdef _on_headers_complete(self): - self._process_header() - - method = http_method_str(self._cparser.method) - should_close = not cparser.http_should_keep_alive(self._cparser) - upgrade = self._cparser.upgrade - chunked = self._cparser.flags & cparser.F_CHUNKED - - raw_headers = tuple(self._raw_headers) - headers = CIMultiDictProxy(self._headers) - - if upgrade or self._cparser.method == 5: # cparser.CONNECT: - self._upgraded = True - - # do not support old websocket spec - if SEC_WEBSOCKET_KEY1 in headers: - raise InvalidHeader(SEC_WEBSOCKET_KEY1) - - encoding = None - enc = self._content_encoding - if enc is not None: - self._content_encoding = None - enc = enc.lower() - if enc in ('gzip', 'deflate', 'br'): - encoding = enc - - if self._cparser.type == cparser.HTTP_REQUEST: - msg = _new_request_message( - method, self._path, - self.http_version(), headers, raw_headers, - should_close, encoding, upgrade, chunked, self._url) - else: - msg = _new_response_message( - self.http_version(), self._cparser.status_code, self._reason, - headers, raw_headers, should_close, encoding, - upgrade, 
chunked) - - if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or - self._cparser.method == 5 or # CONNECT: 5 - (self._cparser.status_code >= 199 and - self._cparser.content_length == ULLONG_MAX and - self._read_until_eof) - ): - payload = StreamReader( - self._protocol, timer=self._timer, loop=self._loop, - limit=self._limit) - else: - payload = EMPTY_PAYLOAD - - self._payload = payload - if encoding is not None and self._auto_decompress: - self._payload = DeflateBuffer(payload, encoding) - - if not self._response_with_body: - payload = EMPTY_PAYLOAD - - self._messages.append((msg, payload)) - - cdef _on_message_complete(self): - self._payload.feed_eof() - self._payload = None - - cdef _on_chunk_header(self): - self._payload.begin_http_chunk_receiving() - - cdef _on_chunk_complete(self): - self._payload.end_http_chunk_receiving() - - cdef object _on_status_complete(self): - pass - - cdef inline http_version(self): - cdef cparser.http_parser* parser = self._cparser - - if parser.http_major == 1: - if parser.http_minor == 0: - return HttpVersion10 - elif parser.http_minor == 1: - return HttpVersion11 - - return HttpVersion(parser.http_major, parser.http_minor) - - ### Public API ### - - def feed_eof(self): - cdef bytes desc - - if self._payload is not None: - if self._cparser.flags & cparser.F_CHUNKED: - raise TransferEncodingError( - "Not enough data for satisfy transfer length header.") - elif self._cparser.flags & cparser.F_CONTENTLENGTH: - raise ContentLengthError( - "Not enough data for satisfy content length header.") - elif self._cparser.http_errno != cparser.HPE_OK: - desc = cparser.http_errno_description( - self._cparser.http_errno) - raise PayloadEncodingError(desc.decode('latin-1')) - else: - self._payload.feed_eof() - elif self._started: - self._on_headers_complete() - if self._messages: - return self._messages[-1][0] - - def feed_data(self, data): - cdef: - size_t data_len - size_t nb - - PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) - data_len = self.py_buf.len - - nb = cparser.http_parser_execute( - self._cparser, - self._csettings, - self.py_buf.buf, - data_len) - - PyBuffer_Release(&self.py_buf) - - if (self._cparser.http_errno != cparser.HPE_OK): - if self._payload_error == 0: - if self._last_error is not None: - ex = self._last_error - self._last_error = None - else: - ex = parser_error_from_errno( - self._cparser.http_errno) - self._payload = None - raise ex - - if self._messages: - messages = self._messages - self._messages = [] - else: - messages = () - - if self._upgraded: - return messages, True, data[nb:] - else: - return messages, False, b'' - - def set_upgraded(self, val): - self._upgraded = val - - -cdef class HttpRequestParser(HttpParser): - - def __init__(self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - ): - self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, - max_line_size, max_headers, max_field_size, - payload_exception, response_with_body, read_until_eof) - - cdef object _on_status_complete(self): - cdef Py_buffer py_buf - if not self._buf: - return - self._path = self._buf.decode('utf-8', 'surrogateescape') - if self._cparser.method == 5: # CONNECT - self._url = URL(self._path) - else: - PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) - try: - self._url = _parse_url(py_buf.buf, - py_buf.len) - finally: - PyBuffer_Release(&py_buf) - 
PyByteArray_Resize(self._buf, 0) - - -cdef class HttpResponseParser(HttpParser): - - def __init__(self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - bint auto_decompress=True - ): - self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, - max_line_size, max_headers, max_field_size, - payload_exception, response_with_body, read_until_eof, - auto_decompress) - - cdef object _on_status_complete(self): - if self._buf: - self._reason = self._buf.decode('utf-8', 'surrogateescape') - PyByteArray_Resize(self._buf, 0) - else: - self._reason = self._reason or '' - -cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: - cdef HttpParser pyparser = parser.data - - pyparser._started = True - pyparser._headers = CIMultiDict() - pyparser._raw_headers = [] - PyByteArray_Resize(pyparser._buf, 0) - pyparser._path = None - pyparser._reason = None - return 0 - - -cdef int cb_on_url(cparser.http_parser* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - try: - if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) - extend(pyparser._buf, at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_status(cparser.http_parser* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef str reason - try: - if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) - extend(pyparser._buf, at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_header_field(cparser.http_parser* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef Py_ssize_t size - try: - pyparser._on_status_complete() - size = len(pyparser._raw_name) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header name is too long', pyparser._max_field_size, size) - pyparser._on_header_field(at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_header_value(cparser.http_parser* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef Py_ssize_t size - try: - size = len(pyparser._raw_value) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header value is too long', pyparser._max_field_size, size) - pyparser._on_header_value(at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_status_complete() - pyparser._on_headers_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT - return 2 - else: - return 0 - - -cdef int cb_on_body(cparser.http_parser* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef bytes body = at[:length] - try: - pyparser._payload.feed_data(body, length) - except BaseException as exc: - if pyparser._payload_exception is not None: - 
pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - else: - pyparser._payload.set_exception(exc) - pyparser._payload_error = 1 - return -1 - else: - return 0 - - -cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._started = False - pyparser._on_message_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_chunk_header() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_chunk_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef parser_error_from_errno(cparser.http_errno errno): - cdef bytes desc = cparser.http_errno_description(errno) - - if errno in (cparser.HPE_CB_message_begin, - cparser.HPE_CB_url, - cparser.HPE_CB_header_field, - cparser.HPE_CB_header_value, - cparser.HPE_CB_headers_complete, - cparser.HPE_CB_body, - cparser.HPE_CB_message_complete, - cparser.HPE_CB_status, - cparser.HPE_CB_chunk_header, - cparser.HPE_CB_chunk_complete): - cls = BadHttpMessage - - elif errno == cparser.HPE_INVALID_STATUS: - cls = BadStatusLine - - elif errno == cparser.HPE_INVALID_METHOD: - cls = BadStatusLine - - elif errno == cparser.HPE_INVALID_URL: - cls = InvalidURLError - - else: - cls = BadHttpMessage - - return cls(desc.decode('latin-1')) - - -def parse_url(url): - cdef: - Py_buffer py_buf - char* buf_data - - PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) - try: - buf_data = py_buf.buf - return _parse_url(buf_data, py_buf.len) - finally: - PyBuffer_Release(&py_buf) - - -cdef _parse_url(char* buf_data, size_t length): - cdef: - cparser.http_parser_url* parsed - int res - str schema = None - str host = None - object port = None - str path = None - str query = None - str fragment = None - str user = None - str password = None - str userinfo = None - object result = None - int off - int ln - - parsed = \ - PyMem_Malloc(sizeof(cparser.http_parser_url)) - if parsed is NULL: - raise MemoryError() - cparser.http_parser_url_init(parsed) - try: - res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) - - if res == 0: - if parsed.field_set & (1 << cparser.UF_SCHEMA): - off = parsed.field_data[cparser.UF_SCHEMA].off - ln = parsed.field_data[cparser.UF_SCHEMA].len - schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - else: - schema = '' - - if parsed.field_set & (1 << cparser.UF_HOST): - off = parsed.field_data[cparser.UF_HOST].off - ln = parsed.field_data[cparser.UF_HOST].len - host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - else: - host = '' - - if parsed.field_set & (1 << cparser.UF_PORT): - port = parsed.port - - if parsed.field_set & (1 << cparser.UF_PATH): - off = parsed.field_data[cparser.UF_PATH].off - ln = parsed.field_data[cparser.UF_PATH].len - path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - else: - path = '' - - if parsed.field_set & (1 << cparser.UF_QUERY): - off = parsed.field_data[cparser.UF_QUERY].off - ln = parsed.field_data[cparser.UF_QUERY].len - query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - else: - query = '' - - if parsed.field_set & (1 << cparser.UF_FRAGMENT): - off = 
parsed.field_data[cparser.UF_FRAGMENT].off - ln = parsed.field_data[cparser.UF_FRAGMENT].len - fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - else: - fragment = '' - - if parsed.field_set & (1 << cparser.UF_USERINFO): - off = parsed.field_data[cparser.UF_USERINFO].off - ln = parsed.field_data[cparser.UF_USERINFO].len - userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') - - user, sep, password = userinfo.partition(':') - - return URL_build(scheme=schema, - user=user, password=password, host=host, port=port, - path=path, query_string=query, fragment=fragment, encoded=True) - else: - raise InvalidURLError("invalid url {!r}".format(buf_data)) - finally: - PyMem_Free(parsed) diff --git a/third_party/python/aiohttp/aiohttp/_http_writer.c b/third_party/python/aiohttp/aiohttp/_http_writer.c deleted file mode 100644 index 09e3efa5b0bc..000000000000 --- a/third_party/python/aiohttp/aiohttp/_http_writer.c +++ /dev/null @@ -1,5840 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. -#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define 
CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P 
- enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) 
-#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if 
CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__aiohttp___http_writer -#define __PYX_HAVE_API__aiohttp___http_writer -/* Early includes */ -#include -#include -#include -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) 
__builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "aiohttp/_http_writer.pyx", - "type.pxd", -}; - -/*--- Type declarations ---*/ -struct __pyx_t_7aiohttp_12_http_writer_Writer; - -/* "aiohttp/_http_writer.pyx":18 - * # ----------------- writer --------------------------- - * - * cdef struct Writer: # <<<<<<<<<<<<<< - * char *buf - * Py_ssize_t size - */ -struct __pyx_t_7aiohttp_12_http_writer_Writer { - char *buf; - Py_ssize_t size; - Py_ssize_t pos; -}; - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { 
PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* WriteUnraisableException.proto */ -static void __Pyx_WriteUnraisable(const char *name, int clineno, - int lineno, const char *filename, - int full_traceback, int nogil); - -/* unicode_iter.proto */ -static CYTHON_INLINE int __Pyx_init_unicode_iteration( - PyObject* ustring, Py_ssize_t *length, void** data, int *kind); - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define 
__Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* ArgTypeTest.proto */ -#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ - ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 
1 :\ - __Pyx__ArgTypeTest(obj, type, name, exact)) -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); - -/* GetTopmostException.proto */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); -#endif - -/* ReRaiseException.proto */ -static CYTHON_INLINE void __Pyx_ReraiseException(void); - -/* IterFinish.proto */ -static CYTHON_INLINE int __Pyx_IterFinish(void); - -/* PyObjectCallNoArg.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); -#else -#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) -#endif - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod0.proto */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); - -/* RaiseNeedMoreValuesToUnpack.proto */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); - -/* RaiseTooManyValuesToUnpack.proto */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); - -/* UnpackItemEndCheck.proto */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); - -/* RaiseNoneIterError.proto */ -static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); - -/* UnpackTupleError.proto */ -static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); - -/* UnpackTuple2.proto */ -#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple)\ - (likely(is_tuple || PyTuple_Check(tuple)) ?\ - (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ?\ - __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) :\ - (__Pyx_UnpackTupleError(tuple, 2), -1)) :\ - __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple)) -static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( - PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple); -static int __Pyx_unpack_tuple2_generic( - PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple); - -/* dict_iter.proto */ -static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, - Py_ssize_t* p_orig_length, int* p_is_dict); -static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, - PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); - -/* GetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* SwapException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* SaveResetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject 
**type, PyObject **value, PyObject **tb); -#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -#else -#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) -#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) -#endif - -/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto -#define __PYX_HAVE_RT_ImportType_proto -enum __Pyx_ImportType_CheckSize { - __Pyx_ImportType_CheckSize_Error = 0, - __Pyx_ImportType_CheckSize_Warn = 1, - __Pyx_ImportType_CheckSize_Ignore = 2 -}; -static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); -#endif - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - - -/* Module declarations from 'libc.string' */ - -/* Module declarations from 'libc.stdio' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.type' */ -static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; - -/* Module declarations from 'cpython' */ - -/* Module declarations from 'cpython.object' */ - -/* Module declarations from 'cpython.bytes' */ - -/* Module declarations from 'cpython.exc' */ - -/* Module declarations from 'cpython.mem' */ - -/* Module declarations from 'libc.stdint' */ - -/* Module declarations from 
'aiohttp._http_writer' */ -static char __pyx_v_7aiohttp_12_http_writer_BUFFER[0x4000]; -static PyObject *__pyx_v_7aiohttp_12_http_writer__istr = 0; -static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__init_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *); /*proto*/ -static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__release_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *); /*proto*/ -static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_byte(struct __pyx_t_7aiohttp_12_http_writer_Writer *, uint8_t); /*proto*/ -static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_utf8(struct __pyx_t_7aiohttp_12_http_writer_Writer *, Py_UCS4); /*proto*/ -static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_str(struct __pyx_t_7aiohttp_12_http_writer_Writer *, PyObject *); /*proto*/ -static PyObject *__pyx_f_7aiohttp_12_http_writer_to_str(PyObject *); /*proto*/ -#define __Pyx_MODULE_NAME "aiohttp._http_writer" -extern int __pyx_module_is_main_aiohttp___http_writer; -int __pyx_module_is_main_aiohttp___http_writer = 0; - -/* Implementation of 'aiohttp._http_writer' */ -static PyObject *__pyx_builtin_TypeError; -static const char __pyx_k_key[] = "key"; -static const char __pyx_k_ret[] = "ret"; -static const char __pyx_k_val[] = "val"; -static const char __pyx_k_istr[] = "istr"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_items[] = "items"; -static const char __pyx_k_format[] = "format"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_writer[] = "writer"; -static const char __pyx_k_headers[] = "headers"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_multidict[] = "multidict"; -static const char __pyx_k_status_line[] = "status_line"; -static const char __pyx_k_serialize_headers[] = "_serialize_headers"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_aiohttp__http_writer[] = "aiohttp._http_writer"; -static const char __pyx_k_aiohttp__http_writer_pyx[] = "aiohttp/_http_writer.pyx"; -static const char __pyx_k_Cannot_serialize_non_str_key_r[] = "Cannot serialize non-str key {!r}"; -static PyObject *__pyx_kp_u_Cannot_serialize_non_str_key_r; -static PyObject *__pyx_n_s_TypeError; -static PyObject *__pyx_n_s_aiohttp__http_writer; -static PyObject *__pyx_kp_s_aiohttp__http_writer_pyx; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_format; -static PyObject *__pyx_n_s_headers; -static PyObject *__pyx_n_s_import; -static PyObject *__pyx_n_s_istr; -static PyObject *__pyx_n_s_items; -static PyObject *__pyx_n_s_key; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_multidict; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_ret; -static PyObject *__pyx_n_s_serialize_headers; -static PyObject *__pyx_n_s_status_line; -static PyObject *__pyx_n_s_test; -static PyObject *__pyx_n_s_val; -static PyObject *__pyx_n_s_writer; -static PyObject *__pyx_pf_7aiohttp_12_http_writer__serialize_headers(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_status_line, PyObject *__pyx_v_headers); /* proto */ -static PyObject *__pyx_tuple_; -static PyObject *__pyx_codeobj__2; -/* Late includes */ - -/* "aiohttp/_http_writer.pyx":24 - * - * - * cdef inline void _init_writer(Writer* writer): # <<<<<<<<<<<<<< - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE - */ - -static CYTHON_INLINE void 
__pyx_f_7aiohttp_12_http_writer__init_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_init_writer", 0); - - /* "aiohttp/_http_writer.pyx":25 - * - * cdef inline void _init_writer(Writer* writer): - * writer.buf = &BUFFER[0] # <<<<<<<<<<<<<< - * writer.size = BUF_SIZE - * writer.pos = 0 - */ - __pyx_v_writer->buf = (&(__pyx_v_7aiohttp_12_http_writer_BUFFER[0])); - - /* "aiohttp/_http_writer.pyx":26 - * cdef inline void _init_writer(Writer* writer): - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE # <<<<<<<<<<<<<< - * writer.pos = 0 - * - */ - __pyx_v_writer->size = 0x4000; - - /* "aiohttp/_http_writer.pyx":27 - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE - * writer.pos = 0 # <<<<<<<<<<<<<< - * - * - */ - __pyx_v_writer->pos = 0; - - /* "aiohttp/_http_writer.pyx":24 - * - * - * cdef inline void _init_writer(Writer* writer): # <<<<<<<<<<<<<< - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "aiohttp/_http_writer.pyx":30 - * - * - * cdef inline void _release_writer(Writer* writer): # <<<<<<<<<<<<<< - * if writer.buf != BUFFER: - * PyMem_Free(writer.buf) - */ - -static CYTHON_INLINE void __pyx_f_7aiohttp_12_http_writer__release_writer(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer) { - __Pyx_RefNannyDeclarations - int __pyx_t_1; - __Pyx_RefNannySetupContext("_release_writer", 0); - - /* "aiohttp/_http_writer.pyx":31 - * - * cdef inline void _release_writer(Writer* writer): - * if writer.buf != BUFFER: # <<<<<<<<<<<<<< - * PyMem_Free(writer.buf) - * - */ - __pyx_t_1 = ((__pyx_v_writer->buf != __pyx_v_7aiohttp_12_http_writer_BUFFER) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":32 - * cdef inline void _release_writer(Writer* writer): - * if writer.buf != BUFFER: - * PyMem_Free(writer.buf) # <<<<<<<<<<<<<< - * - * - */ - PyMem_Free(__pyx_v_writer->buf); - - /* "aiohttp/_http_writer.pyx":31 - * - * cdef inline void _release_writer(Writer* writer): - * if writer.buf != BUFFER: # <<<<<<<<<<<<<< - * PyMem_Free(writer.buf) - * - */ - } - - /* "aiohttp/_http_writer.pyx":30 - * - * - * cdef inline void _release_writer(Writer* writer): # <<<<<<<<<<<<<< - * if writer.buf != BUFFER: - * PyMem_Free(writer.buf) - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "aiohttp/_http_writer.pyx":35 - * - * - * cdef inline int _write_byte(Writer* writer, uint8_t ch): # <<<<<<<<<<<<<< - * cdef char * buf - * cdef Py_ssize_t size - */ - -static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_byte(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer, uint8_t __pyx_v_ch) { - char *__pyx_v_buf; - Py_ssize_t __pyx_v_size; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_write_byte", 0); - - /* "aiohttp/_http_writer.pyx":39 - * cdef Py_ssize_t size - * - * if writer.pos == writer.size: # <<<<<<<<<<<<<< - * # reallocate - * size = writer.size + BUF_SIZE - */ - __pyx_t_1 = ((__pyx_v_writer->pos == __pyx_v_writer->size) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":41 - * if writer.pos == writer.size: - * # reallocate - * size = writer.size + BUF_SIZE # <<<<<<<<<<<<<< - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) - */ - __pyx_v_size = (__pyx_v_writer->size + 0x4000); - - /* "aiohttp/_http_writer.pyx":42 - 
* # reallocate - * size = writer.size + BUF_SIZE - * if writer.buf == BUFFER: # <<<<<<<<<<<<<< - * buf = PyMem_Malloc(size) - * if buf == NULL: - */ - __pyx_t_1 = ((__pyx_v_writer->buf == __pyx_v_7aiohttp_12_http_writer_BUFFER) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":43 - * size = writer.size + BUF_SIZE - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) # <<<<<<<<<<<<<< - * if buf == NULL: - * PyErr_NoMemory() - */ - __pyx_v_buf = ((char *)PyMem_Malloc(__pyx_v_size)); - - /* "aiohttp/_http_writer.pyx":44 - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - __pyx_t_1 = ((__pyx_v_buf == NULL) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":45 - * buf = PyMem_Malloc(size) - * if buf == NULL: - * PyErr_NoMemory() # <<<<<<<<<<<<<< - * return -1 - * memcpy(buf, writer.buf, writer.size) - */ - __pyx_t_2 = PyErr_NoMemory(); if (unlikely(__pyx_t_2 == ((PyObject *)NULL))) __PYX_ERR(0, 45, __pyx_L1_error) - - /* "aiohttp/_http_writer.pyx":46 - * if buf == NULL: - * PyErr_NoMemory() - * return -1 # <<<<<<<<<<<<<< - * memcpy(buf, writer.buf, writer.size) - * else: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":44 - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - } - - /* "aiohttp/_http_writer.pyx":47 - * PyErr_NoMemory() - * return -1 - * memcpy(buf, writer.buf, writer.size) # <<<<<<<<<<<<<< - * else: - * buf = PyMem_Realloc(writer.buf, size) - */ - (void)(memcpy(__pyx_v_buf, __pyx_v_writer->buf, __pyx_v_writer->size)); - - /* "aiohttp/_http_writer.pyx":42 - * # reallocate - * size = writer.size + BUF_SIZE - * if writer.buf == BUFFER: # <<<<<<<<<<<<<< - * buf = PyMem_Malloc(size) - * if buf == NULL: - */ - goto __pyx_L4; - } - - /* "aiohttp/_http_writer.pyx":49 - * memcpy(buf, writer.buf, writer.size) - * else: - * buf = PyMem_Realloc(writer.buf, size) # <<<<<<<<<<<<<< - * if buf == NULL: - * PyErr_NoMemory() - */ - /*else*/ { - __pyx_v_buf = ((char *)PyMem_Realloc(__pyx_v_writer->buf, __pyx_v_size)); - - /* "aiohttp/_http_writer.pyx":50 - * else: - * buf = PyMem_Realloc(writer.buf, size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - __pyx_t_1 = ((__pyx_v_buf == NULL) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":51 - * buf = PyMem_Realloc(writer.buf, size) - * if buf == NULL: - * PyErr_NoMemory() # <<<<<<<<<<<<<< - * return -1 - * writer.buf = buf - */ - __pyx_t_2 = PyErr_NoMemory(); if (unlikely(__pyx_t_2 == ((PyObject *)NULL))) __PYX_ERR(0, 51, __pyx_L1_error) - - /* "aiohttp/_http_writer.pyx":52 - * if buf == NULL: - * PyErr_NoMemory() - * return -1 # <<<<<<<<<<<<<< - * writer.buf = buf - * writer.size = size - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":50 - * else: - * buf = PyMem_Realloc(writer.buf, size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - } - } - __pyx_L4:; - - /* "aiohttp/_http_writer.pyx":53 - * PyErr_NoMemory() - * return -1 - * writer.buf = buf # <<<<<<<<<<<<<< - * writer.size = size - * writer.buf[writer.pos] = ch - */ - __pyx_v_writer->buf = __pyx_v_buf; - - /* "aiohttp/_http_writer.pyx":54 - * return -1 - * writer.buf = buf - * writer.size = size # <<<<<<<<<<<<<< - * writer.buf[writer.pos] = ch - * writer.pos += 1 - */ - __pyx_v_writer->size = __pyx_v_size; - - /* "aiohttp/_http_writer.pyx":39 - * cdef Py_ssize_t size - * - * if writer.pos == 
writer.size: # <<<<<<<<<<<<<< - * # reallocate - * size = writer.size + BUF_SIZE - */ - } - - /* "aiohttp/_http_writer.pyx":55 - * writer.buf = buf - * writer.size = size - * writer.buf[writer.pos] = ch # <<<<<<<<<<<<<< - * writer.pos += 1 - * return 0 - */ - (__pyx_v_writer->buf[__pyx_v_writer->pos]) = ((char)__pyx_v_ch); - - /* "aiohttp/_http_writer.pyx":56 - * writer.size = size - * writer.buf[writer.pos] = ch - * writer.pos += 1 # <<<<<<<<<<<<<< - * return 0 - * - */ - __pyx_v_writer->pos = (__pyx_v_writer->pos + 1); - - /* "aiohttp/_http_writer.pyx":57 - * writer.buf[writer.pos] = ch - * writer.pos += 1 - * return 0 # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":35 - * - * - * cdef inline int _write_byte(Writer* writer, uint8_t ch): # <<<<<<<<<<<<<< - * cdef char * buf - * cdef Py_ssize_t size - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_WriteUnraisable("aiohttp._http_writer._write_byte", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); - __pyx_r = 0; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_writer.pyx":60 - * - * - * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): # <<<<<<<<<<<<<< - * cdef uint64_t utf = symbol - * - */ - -static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_utf8(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer, Py_UCS4 __pyx_v_symbol) { - uint64_t __pyx_v_utf; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - __Pyx_RefNannySetupContext("_write_utf8", 0); - - /* "aiohttp/_http_writer.pyx":61 - * - * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): - * cdef uint64_t utf = symbol # <<<<<<<<<<<<<< - * - * if utf < 0x80: - */ - __pyx_v_utf = ((uint64_t)__pyx_v_symbol); - - /* "aiohttp/_http_writer.pyx":63 - * cdef uint64_t utf = symbol - * - * if utf < 0x80: # <<<<<<<<<<<<<< - * return _write_byte(writer, utf) - * elif utf < 0x800: - */ - __pyx_t_1 = ((__pyx_v_utf < 0x80) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":64 - * - * if utf < 0x80: - * return _write_byte(writer, utf) # <<<<<<<<<<<<<< - * elif utf < 0x800: - * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: - */ - __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)__pyx_v_utf)); - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":63 - * cdef uint64_t utf = symbol - * - * if utf < 0x80: # <<<<<<<<<<<<<< - * return _write_byte(writer, utf) - * elif utf < 0x800: - */ - } - - /* "aiohttp/_http_writer.pyx":65 - * if utf < 0x80: - * return _write_byte(writer, utf) - * elif utf < 0x800: # <<<<<<<<<<<<<< - * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: - * return -1 - */ - __pyx_t_1 = ((__pyx_v_utf < 0x800) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":66 - * return _write_byte(writer, utf) - * elif utf < 0x800: - * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0xc0 | (__pyx_v_utf >> 6)))) < 0) != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":67 - * elif utf < 0x800: - * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * elif 0xD800 <= utf <= 0xDFFF: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":66 - * return _write_byte(writer, utf) - * elif utf < 0x800: - * if 
_write_byte(writer, (0xc0 | (utf >> 6))) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - */ - } - - /* "aiohttp/_http_writer.pyx":68 - * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) # <<<<<<<<<<<<<< - * elif 0xD800 <= utf <= 0xDFFF: - * # surogate pair, ignored - */ - __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f)))); - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":65 - * if utf < 0x80: - * return _write_byte(writer, utf) - * elif utf < 0x800: # <<<<<<<<<<<<<< - * if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: - * return -1 - */ - } - - /* "aiohttp/_http_writer.pyx":69 - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * elif 0xD800 <= utf <= 0xDFFF: # <<<<<<<<<<<<<< - * # surogate pair, ignored - * return 0 - */ - __pyx_t_1 = (0xD800 <= __pyx_v_utf); - if (__pyx_t_1) { - __pyx_t_1 = (__pyx_v_utf <= 0xDFFF); - } - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":71 - * elif 0xD800 <= utf <= 0xDFFF: - * # surogate pair, ignored - * return 0 # <<<<<<<<<<<<<< - * elif utf < 0x10000: - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":69 - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * elif 0xD800 <= utf <= 0xDFFF: # <<<<<<<<<<<<<< - * # surogate pair, ignored - * return 0 - */ - } - - /* "aiohttp/_http_writer.pyx":72 - * # surogate pair, ignored - * return 0 - * elif utf < 0x10000: # <<<<<<<<<<<<<< - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - * return -1 - */ - __pyx_t_2 = ((__pyx_v_utf < 0x10000) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":73 - * return 0 - * elif utf < 0x10000: - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - */ - __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0xe0 | (__pyx_v_utf >> 12)))) < 0) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":74 - * elif utf < 0x10000: - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return -1 - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":73 - * return 0 - * elif utf < 0x10000: - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":75 - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - * return -1 - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - */ - __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 6) & 0x3f)))) < 0) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":76 - * return -1 - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * elif utf > 0x10FFFF: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":75 - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - * return -1 - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_byte(writer, 
(0x80 | (utf & 0x3f))) - */ - } - - /* "aiohttp/_http_writer.pyx":77 - * if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) # <<<<<<<<<<<<<< - * elif utf > 0x10FFFF: - * # symbol is too large - */ - __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f)))); - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":72 - * # surogate pair, ignored - * return 0 - * elif utf < 0x10000: # <<<<<<<<<<<<<< - * if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - * return -1 - */ - } - - /* "aiohttp/_http_writer.pyx":78 - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * elif utf > 0x10FFFF: # <<<<<<<<<<<<<< - * # symbol is too large - * return 0 - */ - __pyx_t_2 = ((__pyx_v_utf > 0x10FFFF) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":80 - * elif utf > 0x10FFFF: - * # symbol is too large - * return 0 # <<<<<<<<<<<<<< - * else: - * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":78 - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * elif utf > 0x10FFFF: # <<<<<<<<<<<<<< - * # symbol is too large - * return 0 - */ - } - - /* "aiohttp/_http_writer.pyx":82 - * return 0 - * else: - * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_byte(writer, - */ - /*else*/ { - __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0xf0 | (__pyx_v_utf >> 18)))) < 0) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":83 - * else: - * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_byte(writer, - * (0x80 | ((utf >> 12) & 0x3f))) < 0: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":82 - * return 0 - * else: - * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_byte(writer, - */ - } - - /* "aiohttp/_http_writer.pyx":85 - * return -1 - * if _write_byte(writer, - * (0x80 | ((utf >> 12) & 0x3f))) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_byte(writer, - */ - __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 12) & 0x3f)))) < 0) != 0); - - /* "aiohttp/_http_writer.pyx":84 - * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: - * return -1 - * if _write_byte(writer, # <<<<<<<<<<<<<< - * (0x80 | ((utf >> 12) & 0x3f))) < 0: - * return -1 - */ - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":86 - * if _write_byte(writer, - * (0x80 | ((utf >> 12) & 0x3f))) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_byte(writer, - * (0x80 | ((utf >> 6) & 0x3f))) < 0: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":84 - * if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: - * return -1 - * if _write_byte(writer, # <<<<<<<<<<<<<< - * (0x80 | ((utf >> 12) & 0x3f))) < 0: - * return -1 - */ - } - - /* "aiohttp/_http_writer.pyx":88 - * return -1 - * if _write_byte(writer, - * (0x80 | ((utf >> 6) & 0x3f))) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - */ - __pyx_t_2 = ((__pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 6) & 0x3f)))) < 0) != 0); - - /* "aiohttp/_http_writer.pyx":87 - * (0x80 | ((utf >> 12) & 0x3f))) < 0: - * return -1 - * if _write_byte(writer, # <<<<<<<<<<<<<< - * (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return 
-1 - */ - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":89 - * if _write_byte(writer, - * (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_byte(writer, (0x80 | (utf & 0x3f))) - * - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":87 - * (0x80 | ((utf >> 12) & 0x3f))) < 0: - * return -1 - * if _write_byte(writer, # <<<<<<<<<<<<<< - * (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return -1 - */ - } - - /* "aiohttp/_http_writer.pyx":90 - * (0x80 | ((utf >> 6) & 0x3f))) < 0: - * return -1 - * return _write_byte(writer, (0x80 | (utf & 0x3f))) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = __pyx_f_7aiohttp_12_http_writer__write_byte(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f)))); - goto __pyx_L0; - } - - /* "aiohttp/_http_writer.pyx":60 - * - * - * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): # <<<<<<<<<<<<<< - * cdef uint64_t utf = symbol - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_writer.pyx":93 - * - * - * cdef inline int _write_str(Writer* writer, str s): # <<<<<<<<<<<<<< - * cdef Py_UCS4 ch - * for ch in s: - */ - -static CYTHON_INLINE int __pyx_f_7aiohttp_12_http_writer__write_str(struct __pyx_t_7aiohttp_12_http_writer_Writer *__pyx_v_writer, PyObject *__pyx_v_s) { - Py_UCS4 __pyx_v_ch; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - Py_ssize_t __pyx_t_3; - void *__pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_write_str", 0); - - /* "aiohttp/_http_writer.pyx":95 - * cdef inline int _write_str(Writer* writer, str s): - * cdef Py_UCS4 ch - * for ch in s: # <<<<<<<<<<<<<< - * if _write_utf8(writer, ch) < 0: - * return -1 - */ - if (unlikely(__pyx_v_s == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' is not iterable"); - __PYX_ERR(0, 95, __pyx_L1_error) - } - __Pyx_INCREF(__pyx_v_s); - __pyx_t_1 = __pyx_v_s; - __pyx_t_6 = __Pyx_init_unicode_iteration(__pyx_t_1, (&__pyx_t_3), (&__pyx_t_4), (&__pyx_t_5)); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 95, __pyx_L1_error) - for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_3; __pyx_t_7++) { - __pyx_t_2 = __pyx_t_7; - __pyx_v_ch = __Pyx_PyUnicode_READ(__pyx_t_5, __pyx_t_4, __pyx_t_2); - - /* "aiohttp/_http_writer.pyx":96 - * cdef Py_UCS4 ch - * for ch in s: - * if _write_utf8(writer, ch) < 0: # <<<<<<<<<<<<<< - * return -1 - * - */ - __pyx_t_8 = ((__pyx_f_7aiohttp_12_http_writer__write_utf8(__pyx_v_writer, __pyx_v_ch) < 0) != 0); - if (__pyx_t_8) { - - /* "aiohttp/_http_writer.pyx":97 - * for ch in s: - * if _write_utf8(writer, ch) < 0: - * return -1 # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = -1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":96 - * cdef Py_UCS4 ch - * for ch in s: - * if _write_utf8(writer, ch) < 0: # <<<<<<<<<<<<<< - * return -1 - * - */ - } - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_http_writer.pyx":93 - * - * - * cdef inline int _write_str(Writer* writer, str s): # <<<<<<<<<<<<<< - * cdef Py_UCS4 ch - * for ch in s: - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_WriteUnraisable("aiohttp._http_writer._write_str", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); - __pyx_r = 0; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - 
return __pyx_r; -} - -/* "aiohttp/_http_writer.pyx":102 - * # --------------- _serialize_headers ---------------------- - * - * cdef str to_str(object s): # <<<<<<<<<<<<<< - * typ = type(s) - * if typ is str: - */ - -static PyObject *__pyx_f_7aiohttp_12_http_writer_to_str(PyObject *__pyx_v_s) { - PyTypeObject *__pyx_v_typ = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_str", 0); - - /* "aiohttp/_http_writer.pyx":103 - * - * cdef str to_str(object s): - * typ = type(s) # <<<<<<<<<<<<<< - * if typ is str: - * return s - */ - __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_s))); - __pyx_v_typ = ((PyTypeObject*)((PyObject *)Py_TYPE(__pyx_v_s))); - - /* "aiohttp/_http_writer.pyx":104 - * cdef str to_str(object s): - * typ = type(s) - * if typ is str: # <<<<<<<<<<<<<< - * return s - * elif typ is _istr: - */ - __pyx_t_1 = (__pyx_v_typ == (&PyUnicode_Type)); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "aiohttp/_http_writer.pyx":105 - * typ = type(s) - * if typ is str: - * return s # <<<<<<<<<<<<<< - * elif typ is _istr: - * return PyObject_Str(s) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((PyObject*)__pyx_v_s)); - __pyx_r = ((PyObject*)__pyx_v_s); - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":104 - * cdef str to_str(object s): - * typ = type(s) - * if typ is str: # <<<<<<<<<<<<<< - * return s - * elif typ is _istr: - */ - } - - /* "aiohttp/_http_writer.pyx":106 - * if typ is str: - * return s - * elif typ is _istr: # <<<<<<<<<<<<<< - * return PyObject_Str(s) - * elif not isinstance(s, str): - */ - __pyx_t_2 = (__pyx_v_typ == ((PyTypeObject*)__pyx_v_7aiohttp_12_http_writer__istr)); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "aiohttp/_http_writer.pyx":107 - * return s - * elif typ is _istr: - * return PyObject_Str(s) # <<<<<<<<<<<<<< - * elif not isinstance(s, str): - * raise TypeError("Cannot serialize non-str key {!r}".format(s)) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = PyObject_Str(__pyx_v_s); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 107, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 107, __pyx_L1_error) - __pyx_r = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "aiohttp/_http_writer.pyx":106 - * if typ is str: - * return s - * elif typ is _istr: # <<<<<<<<<<<<<< - * return PyObject_Str(s) - * elif not isinstance(s, str): - */ - } - - /* "aiohttp/_http_writer.pyx":108 - * elif typ is _istr: - * return PyObject_Str(s) - * elif not isinstance(s, str): # <<<<<<<<<<<<<< - * raise TypeError("Cannot serialize non-str key {!r}".format(s)) - * else: - */ - __pyx_t_1 = PyUnicode_Check(__pyx_v_s); - __pyx_t_2 = ((!(__pyx_t_1 != 0)) != 0); - if (unlikely(__pyx_t_2)) { - - /* "aiohttp/_http_writer.pyx":109 - * return PyObject_Str(s) - * elif not isinstance(s, str): - * raise TypeError("Cannot serialize non-str key {!r}".format(s)) # <<<<<<<<<<<<<< - * else: - * return str(s) - */ - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_Cannot_serialize_non_str_key_r, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 109, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS 
&& likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_v_s) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_s); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 109, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 109, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(0, 109, __pyx_L1_error) - - /* "aiohttp/_http_writer.pyx":108 - * elif typ is _istr: - * return PyObject_Str(s) - * elif not isinstance(s, str): # <<<<<<<<<<<<<< - * raise TypeError("Cannot serialize non-str key {!r}".format(s)) - * else: - */ - } - - /* "aiohttp/_http_writer.pyx":111 - * raise TypeError("Cannot serialize non-str key {!r}".format(s)) - * else: - * return str(s) # <<<<<<<<<<<<<< - * - * - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), __pyx_v_s); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 111, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_r = ((PyObject*)__pyx_t_4); - __pyx_t_4 = 0; - goto __pyx_L0; - } - - /* "aiohttp/_http_writer.pyx":102 - * # --------------- _serialize_headers ---------------------- - * - * cdef str to_str(object s): # <<<<<<<<<<<<<< - * typ = type(s) - * if typ is str: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("aiohttp._http_writer.to_str", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_typ); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "aiohttp/_http_writer.pyx":114 - * - * - * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< - * cdef Writer writer - * cdef object key - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_12_http_writer_1_serialize_headers(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_7aiohttp_12_http_writer_1_serialize_headers = {"_serialize_headers", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_12_http_writer_1_serialize_headers, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_7aiohttp_12_http_writer_1_serialize_headers(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_status_line = 0; - PyObject *__pyx_v_headers = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_serialize_headers (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_status_line,&__pyx_n_s_headers,0}; - PyObject* values[2] = {0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - 
default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_status_line)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_headers)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("_serialize_headers", 1, 2, 2, 1); __PYX_ERR(0, 114, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_serialize_headers") < 0)) __PYX_ERR(0, 114, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - } - __pyx_v_status_line = ((PyObject*)values[0]); - __pyx_v_headers = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_serialize_headers", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 114, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._http_writer._serialize_headers", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_status_line), (&PyUnicode_Type), 1, "status_line", 1))) __PYX_ERR(0, 114, __pyx_L1_error) - __pyx_r = __pyx_pf_7aiohttp_12_http_writer__serialize_headers(__pyx_self, __pyx_v_status_line, __pyx_v_headers); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_12_http_writer__serialize_headers(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_status_line, PyObject *__pyx_v_headers) { - struct __pyx_t_7aiohttp_12_http_writer_Writer __pyx_v_writer; - PyObject *__pyx_v_key = 0; - PyObject *__pyx_v_val = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - Py_ssize_t __pyx_t_3; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - int __pyx_t_8; - char const *__pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - PyObject *__pyx_t_15 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_serialize_headers", 0); - - /* "aiohttp/_http_writer.pyx":120 - * cdef bytes ret - * - * _init_writer(&writer) # <<<<<<<<<<<<<< - * - * try: - */ - __pyx_f_7aiohttp_12_http_writer__init_writer((&__pyx_v_writer)); - - /* "aiohttp/_http_writer.pyx":122 - * _init_writer(&writer) - * - * try: # <<<<<<<<<<<<<< - * if _write_str(&writer, status_line) < 0: - * raise - */ - /*try:*/ { - - /* "aiohttp/_http_writer.pyx":123 - * - * try: - * if _write_str(&writer, status_line) < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\r') < 0: - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_str((&__pyx_v_writer), __pyx_v_status_line) < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":124 - * try: - * if _write_str(&writer, status_line) < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b'\r') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 124, 
__pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":123 - * - * try: - * if _write_str(&writer, status_line) < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\r') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":125 - * if _write_str(&writer, status_line) < 0: - * raise - * if _write_byte(&writer, b'\r') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\n') < 0: - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\r') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":126 - * raise - * if _write_byte(&writer, b'\r') < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b'\n') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 126, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":125 - * if _write_str(&writer, status_line) < 0: - * raise - * if _write_byte(&writer, b'\r') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\n') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":127 - * if _write_byte(&writer, b'\r') < 0: - * raise - * if _write_byte(&writer, b'\n') < 0: # <<<<<<<<<<<<<< - * raise - * - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\n') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":128 - * raise - * if _write_byte(&writer, b'\n') < 0: - * raise # <<<<<<<<<<<<<< - * - * for key, val in headers.items(): - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 128, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":127 - * if _write_byte(&writer, b'\r') < 0: - * raise - * if _write_byte(&writer, b'\n') < 0: # <<<<<<<<<<<<<< - * raise - * - */ - } - - /* "aiohttp/_http_writer.pyx":130 - * raise - * - * for key, val in headers.items(): # <<<<<<<<<<<<<< - * if _write_str(&writer, to_str(key)) < 0: - * raise - */ - __pyx_t_3 = 0; - if (unlikely(__pyx_v_headers == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 130, __pyx_L4_error) - } - __pyx_t_6 = __Pyx_dict_iterator(__pyx_v_headers, 0, __pyx_n_s_items, (&__pyx_t_4), (&__pyx_t_5)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 130, __pyx_L4_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_2); - __pyx_t_2 = __pyx_t_6; - __pyx_t_6 = 0; - while (1) { - __pyx_t_8 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_4, &__pyx_t_3, &__pyx_t_6, &__pyx_t_7, NULL, __pyx_t_5); - if (unlikely(__pyx_t_8 == 0)) break; - if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 130, __pyx_L4_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GOTREF(__pyx_t_7); - __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_6); - __pyx_t_6 = 0; - __Pyx_XDECREF_SET(__pyx_v_val, __pyx_t_7); - __pyx_t_7 = 0; - - /* "aiohttp/_http_writer.pyx":131 - * - * for key, val in headers.items(): - * if _write_str(&writer, to_str(key)) < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b':') < 0: - */ - __pyx_t_7 = __pyx_f_7aiohttp_12_http_writer_to_str(__pyx_v_key); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 131, __pyx_L4_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_str((&__pyx_v_writer), ((PyObject*)__pyx_t_7)) < 0) != 0); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":132 - * for key, val in headers.items(): - * if _write_str(&writer, to_str(key)) < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b':') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 132, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":131 - * - * for key, val in headers.items(): - * if 
_write_str(&writer, to_str(key)) < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b':') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":133 - * if _write_str(&writer, to_str(key)) < 0: - * raise - * if _write_byte(&writer, b':') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b' ') < 0: - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), ':') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":134 - * raise - * if _write_byte(&writer, b':') < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b' ') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 134, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":133 - * if _write_str(&writer, to_str(key)) < 0: - * raise - * if _write_byte(&writer, b':') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b' ') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":135 - * if _write_byte(&writer, b':') < 0: - * raise - * if _write_byte(&writer, b' ') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_str(&writer, to_str(val)) < 0: - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), ' ') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":136 - * raise - * if _write_byte(&writer, b' ') < 0: - * raise # <<<<<<<<<<<<<< - * if _write_str(&writer, to_str(val)) < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 136, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":135 - * if _write_byte(&writer, b':') < 0: - * raise - * if _write_byte(&writer, b' ') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_str(&writer, to_str(val)) < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":137 - * if _write_byte(&writer, b' ') < 0: - * raise - * if _write_str(&writer, to_str(val)) < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\r') < 0: - */ - __pyx_t_7 = __pyx_f_7aiohttp_12_http_writer_to_str(__pyx_v_val); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 137, __pyx_L4_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_str((&__pyx_v_writer), ((PyObject*)__pyx_t_7)) < 0) != 0); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":138 - * raise - * if _write_str(&writer, to_str(val)) < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b'\r') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 138, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":137 - * if _write_byte(&writer, b' ') < 0: - * raise - * if _write_str(&writer, to_str(val)) < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\r') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":139 - * if _write_str(&writer, to_str(val)) < 0: - * raise - * if _write_byte(&writer, b'\r') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\n') < 0: - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\r') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":140 - * raise - * if _write_byte(&writer, b'\r') < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b'\n') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 140, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":139 - * if _write_str(&writer, to_str(val)) < 0: - * raise - * if _write_byte(&writer, b'\r') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\n') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":141 - * if _write_byte(&writer, b'\r') < 0: - * raise - * if _write_byte(&writer, b'\n') < 0: # <<<<<<<<<<<<<< 
- * raise - * - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\n') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":142 - * raise - * if _write_byte(&writer, b'\n') < 0: - * raise # <<<<<<<<<<<<<< - * - * if _write_byte(&writer, b'\r') < 0: - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 142, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":141 - * if _write_byte(&writer, b'\r') < 0: - * raise - * if _write_byte(&writer, b'\n') < 0: # <<<<<<<<<<<<<< - * raise - * - */ - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_writer.pyx":144 - * raise - * - * if _write_byte(&writer, b'\r') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\n') < 0: - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\r') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":145 - * - * if _write_byte(&writer, b'\r') < 0: - * raise # <<<<<<<<<<<<<< - * if _write_byte(&writer, b'\n') < 0: - * raise - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 145, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":144 - * raise - * - * if _write_byte(&writer, b'\r') < 0: # <<<<<<<<<<<<<< - * raise - * if _write_byte(&writer, b'\n') < 0: - */ - } - - /* "aiohttp/_http_writer.pyx":146 - * if _write_byte(&writer, b'\r') < 0: - * raise - * if _write_byte(&writer, b'\n') < 0: # <<<<<<<<<<<<<< - * raise - * - */ - __pyx_t_1 = ((__pyx_f_7aiohttp_12_http_writer__write_byte((&__pyx_v_writer), '\n') < 0) != 0); - if (unlikely(__pyx_t_1)) { - - /* "aiohttp/_http_writer.pyx":147 - * raise - * if _write_byte(&writer, b'\n') < 0: - * raise # <<<<<<<<<<<<<< - * - * return PyBytes_FromStringAndSize(writer.buf, writer.pos) - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 147, __pyx_L4_error) - - /* "aiohttp/_http_writer.pyx":146 - * if _write_byte(&writer, b'\r') < 0: - * raise - * if _write_byte(&writer, b'\n') < 0: # <<<<<<<<<<<<<< - * raise - * - */ - } - - /* "aiohttp/_http_writer.pyx":149 - * raise - * - * return PyBytes_FromStringAndSize(writer.buf, writer.pos) # <<<<<<<<<<<<<< - * finally: - * _release_writer(&writer) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = PyBytes_FromStringAndSize(__pyx_v_writer.buf, __pyx_v_writer.pos); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L4_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L3_return; - } - - /* "aiohttp/_http_writer.pyx":151 - * return PyBytes_FromStringAndSize(writer.buf, writer.pos) - * finally: - * _release_writer(&writer) # <<<<<<<<<<<<<< - */ - /*finally:*/ { - __pyx_L4_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); - __Pyx_XGOTREF(__pyx_t_10); - __Pyx_XGOTREF(__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_12); - __Pyx_XGOTREF(__pyx_t_13); - __Pyx_XGOTREF(__pyx_t_14); - __Pyx_XGOTREF(__pyx_t_15); - __pyx_t_5 = __pyx_lineno; __pyx_t_8 = __pyx_clineno; __pyx_t_9 = __pyx_filename; - { - __pyx_f_7aiohttp_12_http_writer__release_writer((&__pyx_v_writer)); - } - if (PY_MAJOR_VERSION >= 3) { - 
__Pyx_XGIVEREF(__pyx_t_13); - __Pyx_XGIVEREF(__pyx_t_14); - __Pyx_XGIVEREF(__pyx_t_15); - __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); - } - __Pyx_XGIVEREF(__pyx_t_10); - __Pyx_XGIVEREF(__pyx_t_11); - __Pyx_XGIVEREF(__pyx_t_12); - __Pyx_ErrRestore(__pyx_t_10, __pyx_t_11, __pyx_t_12); - __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; - __pyx_lineno = __pyx_t_5; __pyx_clineno = __pyx_t_8; __pyx_filename = __pyx_t_9; - goto __pyx_L1_error; - } - __pyx_L3_return: { - __pyx_t_15 = __pyx_r; - __pyx_r = 0; - __pyx_f_7aiohttp_12_http_writer__release_writer((&__pyx_v_writer)); - __pyx_r = __pyx_t_15; - __pyx_t_15 = 0; - goto __pyx_L0; - } - } - - /* "aiohttp/_http_writer.pyx":114 - * - * - * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< - * cdef Writer writer - * cdef object key - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("aiohttp._http_writer._serialize_headers", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_key); - __Pyx_XDECREF(__pyx_v_val); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__http_writer(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__http_writer}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "_http_writer", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_Cannot_serialize_non_str_key_r, __pyx_k_Cannot_serialize_non_str_key_r, sizeof(__pyx_k_Cannot_serialize_non_str_key_r), 0, 1, 0, 0}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_aiohttp__http_writer, __pyx_k_aiohttp__http_writer, sizeof(__pyx_k_aiohttp__http_writer), 0, 0, 1, 1}, - {&__pyx_kp_s_aiohttp__http_writer_pyx, __pyx_k_aiohttp__http_writer_pyx, sizeof(__pyx_k_aiohttp__http_writer_pyx), 0, 0, 1, 0}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, - {&__pyx_n_s_headers, __pyx_k_headers, sizeof(__pyx_k_headers), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_istr, __pyx_k_istr, sizeof(__pyx_k_istr), 0, 0, 1, 1}, - {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, - {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, 
sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_multidict, __pyx_k_multidict, sizeof(__pyx_k_multidict), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_ret, __pyx_k_ret, sizeof(__pyx_k_ret), 0, 0, 1, 1}, - {&__pyx_n_s_serialize_headers, __pyx_k_serialize_headers, sizeof(__pyx_k_serialize_headers), 0, 0, 1, 1}, - {&__pyx_n_s_status_line, __pyx_k_status_line, sizeof(__pyx_k_status_line), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_val, __pyx_k_val, sizeof(__pyx_k_val), 0, 0, 1, 1}, - {&__pyx_n_s_writer, __pyx_k_writer, sizeof(__pyx_k_writer), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 109, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "aiohttp/_http_writer.pyx":114 - * - * - * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< - * cdef Writer writer - * cdef object key - */ - __pyx_tuple_ = PyTuple_Pack(6, __pyx_n_s_status_line, __pyx_n_s_headers, __pyx_n_s_writer, __pyx_n_s_key, __pyx_n_s_val, __pyx_n_s_ret); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 114, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_writer_pyx, __pyx_n_s_serialize_headers, 114, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 114, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __pyx_v_7aiohttp_12_http_writer__istr = Py_None; Py_INCREF(Py_None); - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int 
__Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", - #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), - #else - sizeof(PyHeapTypeObject), - #endif - __Pyx_ImportType_CheckSize_Warn); - if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_http_writer(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_http_writer(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__http_writer(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__http_writer(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? 
-1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { - result = PyDict_SetItemString(moddict, to_name, value); - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__http_writer(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_http_writer' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__http_writer(void)", 0); - if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? */ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_http_writer", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_b); - __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_cython_runtime); - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_aiohttp___http_writer) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "aiohttp._http_writer")) { - if (unlikely(PyDict_SetItemString(modules, "aiohttp._http_writer", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - (void)__Pyx_modinit_type_init_code(); - if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "aiohttp/_http_writer.pyx":8 - * from libc.string cimport memcpy - * - * from multidict import istr # <<<<<<<<<<<<<< - * - * DEF BUF_SIZE = 16 * 1024 # 16KiB - */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_istr); - __Pyx_GIVEREF(__pyx_n_s_istr); - PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_istr); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_multidict, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_istr); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_istr, __pyx_t_1) < 0) __PYX_ERR(0, 8, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_writer.pyx":13 - * cdef char BUFFER[BUF_SIZE] - * - * cdef object _istr = istr # <<<<<<<<<<<<<< - * - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_istr); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_v_7aiohttp_12_http_writer__istr); - __Pyx_DECREF_SET(__pyx_v_7aiohttp_12_http_writer__istr, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_2 = 0; - - /* "aiohttp/_http_writer.pyx":114 - * - * - * def _serialize_headers(str status_line, headers): # <<<<<<<<<<<<<< - * cdef Writer writer - * cdef object key - */ - __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_writer_1_serialize_headers, NULL, __pyx_n_s_aiohttp__http_writer); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 114, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_serialize_headers, __pyx_t_2) < 0) __PYX_ERR(0, 114, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "aiohttp/_http_writer.pyx":1 - * from cpython.bytes cimport PyBytes_FromStringAndSize # <<<<<<<<<<<<<< - * from 
cpython.exc cimport PyErr_NoMemory - * from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - if (__pyx_m) { - if (__pyx_d) { - __Pyx_AddTraceback("init aiohttp._http_writer", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - Py_CLEAR(__pyx_m); - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init aiohttp._http_writer"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* WriteUnraisableException */ -static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, - CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, - int full_traceback, CYTHON_UNUSED int nogil) { - PyObject *old_exc, *old_val, *old_tb; - PyObject *ctx; - __Pyx_PyThreadState_declare -#ifdef WITH_THREAD - PyGILState_STATE state; - if (nogil) - state = PyGILState_Ensure(); -#ifdef _MSC_VER - else state = (PyGILState_STATE)-1; -#endif -#endif - __Pyx_PyThreadState_assign - 
__Pyx_ErrFetch(&old_exc, &old_val, &old_tb); - if (full_traceback) { - Py_XINCREF(old_exc); - Py_XINCREF(old_val); - Py_XINCREF(old_tb); - __Pyx_ErrRestore(old_exc, old_val, old_tb); - PyErr_PrintEx(1); - } - #if PY_MAJOR_VERSION < 3 - ctx = PyString_FromString(name); - #else - ctx = PyUnicode_FromString(name); - #endif - __Pyx_ErrRestore(old_exc, old_val, old_tb); - if (!ctx) { - PyErr_WriteUnraisable(Py_None); - } else { - PyErr_WriteUnraisable(ctx); - Py_DECREF(ctx); - } -#ifdef WITH_THREAD - if (nogil) - PyGILState_Release(state); -#endif -} - -/* unicode_iter */ -static CYTHON_INLINE int __Pyx_init_unicode_iteration( - PyObject* ustring, Py_ssize_t *length, void** data, int *kind) { -#if CYTHON_PEP393_ENABLED - if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return -1; - *kind = PyUnicode_KIND(ustring); - *length = PyUnicode_GET_LENGTH(ustring); - *data = PyUnicode_DATA(ustring); -#else - *kind = 0; - *length = PyUnicode_GET_SIZE(ustring); - *data = (void*)PyUnicode_AS_UNICODE(ustring); -#endif - return 0; -} - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = PyCFunction_GET_SELF(func); - int flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? 
PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCall2Args */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args, *result = NULL; - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyFunction_FastCall(function, args, 2); - } - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyCFunction_FastCall(function, args, 2); - } - #endif - args = PyTuple_New(2); - if (unlikely(!args)) goto done; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, arg2); - Py_INCREF(function); - result = __Pyx_PyObject_Call(function, args, NULL); - Py_DECREF(args); - Py_DECREF(function); -done: - return result; -} - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a 
Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, &arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -#endif - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, - CYTHON_UNUSED PyObject *cause) { - __Pyx_PyThreadState_declare - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = 
PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { -#if CYTHON_COMPILING_IN_PYPY - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#else - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? 
"" : "s", num_found); -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* ArgTypeTest */ -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) -{ - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - else if (exact) { - #if PY_MAJOR_VERSION == 2 - if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; - #endif - } - else { - if (likely(__Pyx_TypeCheck(obj, type))) return 1; - } - PyErr_Format(PyExc_TypeError, - "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", - name, type->tp_name, Py_TYPE(obj)->tp_name); - return 0; -} - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* ReRaiseException */ -static CYTHON_INLINE void __Pyx_ReraiseException(void) { - PyObject *type = NULL, *value = NULL, *tb = NULL; -#if CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = PyThreadState_GET(); - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - type = exc_info->exc_type; - value = exc_info->exc_value; - tb = exc_info->exc_traceback; - #else - type = tstate->exc_type; - value = tstate->exc_value; - tb = tstate->exc_traceback; - #endif -#else - PyErr_GetExcInfo(&type, &value, &tb); -#endif - if (!type || type == Py_None) { -#if !CYTHON_FAST_THREAD_STATE - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(tb); -#endif - PyErr_SetString(PyExc_RuntimeError, - "No active exception to reraise"); - } else { -#if CYTHON_FAST_THREAD_STATE - Py_INCREF(type); - Py_XINCREF(value); - Py_XINCREF(tb); -#endif - PyErr_Restore(type, value, tb); - } -} - -/* IterFinish */ -static CYTHON_INLINE int __Pyx_IterFinish(void) { -#if CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* exc_type = tstate->curexc_type; - if (unlikely(exc_type)) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { - PyObject *exc_value, *exc_tb; - exc_value = tstate->curexc_value; - exc_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; - Py_DECREF(exc_type); - Py_XDECREF(exc_value); - Py_XDECREF(exc_tb); - return 0; - } else { - return -1; - } - } - return 0; -#else - if (unlikely(PyErr_Occurred())) { - if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { - PyErr_Clear(); - return 0; - } else { - return -1; - } - } - return 0; -#endif -} - 
-/* PyObjectCallNoArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, NULL, 0); - } -#endif -#ifdef __Pyx_CyFunction_USED - if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) -#else - if (likely(PyCFunction_Check(func))) -#endif - { - if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { - return __Pyx_PyObject_CallMethO(func, NULL); - } - } - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); -} -#endif - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else { - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (descr != NULL) { - *method = descr; - return 0; - } - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(name)); -#endif - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod0 */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { - PyObject *method = NULL, *result = NULL; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_CallOneArg(method, obj); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) goto bad; - result = __Pyx_PyObject_CallNoArg(method); - Py_DECREF(method); -bad: - return result; -} - 
-/* RaiseNeedMoreValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { - PyErr_Format(PyExc_ValueError, - "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", - index, (index == 1) ? "" : "s"); -} - -/* RaiseTooManyValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { - PyErr_Format(PyExc_ValueError, - "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); -} - -/* UnpackItemEndCheck */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { - if (unlikely(retval)) { - Py_DECREF(retval); - __Pyx_RaiseTooManyValuesError(expected); - return -1; - } else { - return __Pyx_IterFinish(); - } - return 0; -} - -/* RaiseNoneIterError */ -static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); -} - -/* UnpackTupleError */ -static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { - if (t == Py_None) { - __Pyx_RaiseNoneNotIterableError(); - } else if (PyTuple_GET_SIZE(t) < index) { - __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); - } else { - __Pyx_RaiseTooManyValuesError(index); - } -} - -/* UnpackTuple2 */ -static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( - PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { - PyObject *value1 = NULL, *value2 = NULL; -#if CYTHON_COMPILING_IN_PYPY - value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; - value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; -#else - value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); - value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); -#endif - if (decref_tuple) { - Py_DECREF(tuple); - } - *pvalue1 = value1; - *pvalue2 = value2; - return 0; -#if CYTHON_COMPILING_IN_PYPY -bad: - Py_XDECREF(value1); - Py_XDECREF(value2); - if (decref_tuple) { Py_XDECREF(tuple); } - return -1; -#endif -} -static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, - int has_known_size, int decref_tuple) { - Py_ssize_t index; - PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; - iternextfunc iternext; - iter = PyObject_GetIter(tuple); - if (unlikely(!iter)) goto bad; - if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } - iternext = Py_TYPE(iter)->tp_iternext; - value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } - value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } - if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; - Py_DECREF(iter); - *pvalue1 = value1; - *pvalue2 = value2; - return 0; -unpacking_failed: - if (!has_known_size && __Pyx_IterFinish() == 0) - __Pyx_RaiseNeedMoreValuesError(index); -bad: - Py_XDECREF(iter); - Py_XDECREF(value1); - Py_XDECREF(value2); - if (decref_tuple) { Py_XDECREF(tuple); } - return -1; -} - -/* dict_iter */ -static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, - Py_ssize_t* p_orig_length, int* p_source_is_dict) { - is_dict = is_dict || likely(PyDict_CheckExact(iterable)); - *p_source_is_dict = is_dict; - if (is_dict) { -#if !CYTHON_COMPILING_IN_PYPY - *p_orig_length = PyDict_Size(iterable); - Py_INCREF(iterable); - return iterable; -#elif PY_MAJOR_VERSION >= 3 - static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; - PyObject **pp = NULL; - if (method_name) { - const char *name = 
PyUnicode_AsUTF8(method_name); - if (strcmp(name, "iteritems") == 0) pp = &py_items; - else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; - else if (strcmp(name, "itervalues") == 0) pp = &py_values; - if (pp) { - if (!*pp) { - *pp = PyUnicode_FromString(name + 4); - if (!*pp) - return NULL; - } - method_name = *pp; - } - } -#endif - } - *p_orig_length = 0; - if (method_name) { - PyObject* iter; - iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); - if (!iterable) - return NULL; -#if !CYTHON_COMPILING_IN_PYPY - if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) - return iterable; -#endif - iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - return iter; - } - return PyObject_GetIter(iterable); -} -static CYTHON_INLINE int __Pyx_dict_iter_next( - PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, - PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { - PyObject* next_item; -#if !CYTHON_COMPILING_IN_PYPY - if (source_is_dict) { - PyObject *key, *value; - if (unlikely(orig_length != PyDict_Size(iter_obj))) { - PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); - return -1; - } - if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { - return 0; - } - if (pitem) { - PyObject* tuple = PyTuple_New(2); - if (unlikely(!tuple)) { - return -1; - } - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(tuple, 0, key); - PyTuple_SET_ITEM(tuple, 1, value); - *pitem = tuple; - } else { - if (pkey) { - Py_INCREF(key); - *pkey = key; - } - if (pvalue) { - Py_INCREF(value); - *pvalue = value; - } - } - return 1; - } else if (PyTuple_CheckExact(iter_obj)) { - Py_ssize_t pos = *ppos; - if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; - *ppos = pos + 1; - next_item = PyTuple_GET_ITEM(iter_obj, pos); - Py_INCREF(next_item); - } else if (PyList_CheckExact(iter_obj)) { - Py_ssize_t pos = *ppos; - if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; - *ppos = pos + 1; - next_item = PyList_GET_ITEM(iter_obj, pos); - Py_INCREF(next_item); - } else -#endif - { - next_item = PyIter_Next(iter_obj); - if (unlikely(!next_item)) { - return __Pyx_IterFinish(); - } - } - if (pitem) { - *pitem = next_item; - } else if (pkey && pvalue) { - if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) - return -1; - } else if (pkey) { - *pkey = next_item; - } else { - *pvalue = next_item; - } - return 1; -} - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type, *local_value, *local_tb; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - 
*type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - -/* SwapException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = *type; - exc_info->exc_value = *value; - exc_info->exc_traceback = *tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = *type; - tstate->exc_value = *value; - tstate->exc_traceback = *tb; - #endif - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); - PyErr_SetExcInfo(*type, *value, *tb); - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - #endif - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); -} -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -#endif - -/* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType -#define __PYX_HAVE_RT_ImportType -static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, - size_t size, enum __Pyx_ImportType_CheckSize check_size) -{ - PyObject *result = 0; - char warning[200]; 
- Py_ssize_t basicsize; -#ifdef Py_LIMITED_API - PyObject *py_basicsize; -#endif - result = PyObject_GetAttrString(module, class_name); - if (!result) - goto bad; - if (!PyType_Check(result)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%.200s is not a type object", - module_name, class_name); - goto bad; - } -#ifndef Py_LIMITED_API - basicsize = ((PyTypeObject *)result)->tp_basicsize; -#else - py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); - if (!py_basicsize) - goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; -#endif - if ((size_t)basicsize < size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { - PyOS_snprintf(warning, sizeof(warning), - "%s.%s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; - } - return (PyTypeObject *)result; -bad: - Py_XDECREF(result); - return NULL; -} -#endif - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (!py_import) - goto bad; - #endif - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, 1); - if (!module) { - if (!PyErr_ExceptionMatches(PyExc_ImportError)) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (!py_level) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, level); - #endif - } - } -bad: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - Py_XDECREF(empty_list); - Py_XDECREF(empty_dict); - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; 
-} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = 
PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto 
raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: 
- if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = 
__Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } 
else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { 
- int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; ip) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - 
char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). 
" - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/third_party/python/aiohttp/aiohttp/_http_writer.pyx b/third_party/python/aiohttp/aiohttp/_http_writer.pyx deleted file mode 100644 index 84b42fa1c35e..000000000000 --- a/third_party/python/aiohttp/aiohttp/_http_writer.pyx +++ /dev/null @@ -1,151 +0,0 @@ -from cpython.bytes cimport PyBytes_FromStringAndSize -from cpython.exc cimport PyErr_NoMemory -from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc -from cpython.object cimport PyObject_Str -from libc.stdint cimport uint8_t, uint64_t -from libc.string cimport memcpy - -from multidict import istr - -DEF BUF_SIZE = 16 * 1024 # 16KiB -cdef char BUFFER[BUF_SIZE] - -cdef object _istr = istr - - -# ----------------- writer --------------------------- - -cdef struct Writer: - char *buf - Py_ssize_t size - Py_ssize_t pos - - -cdef inline void _init_writer(Writer* writer): - writer.buf = &BUFFER[0] - writer.size = BUF_SIZE - writer.pos = 0 - - -cdef inline void _release_writer(Writer* writer): - if writer.buf != BUFFER: - PyMem_Free(writer.buf) - - -cdef inline int _write_byte(Writer* writer, uint8_t ch): - cdef char * buf - cdef Py_ssize_t size - - if writer.pos == writer.size: - # reallocate - size = writer.size + BUF_SIZE - if writer.buf == BUFFER: - buf = <char*>PyMem_Malloc(size) - if buf == NULL: - PyErr_NoMemory() - return -1 - memcpy(buf, writer.buf, writer.size) - else: - buf = <char*>PyMem_Realloc(writer.buf, size) - if buf == NULL: - PyErr_NoMemory() - return -1 - writer.buf = buf - writer.size = size - writer.buf[writer.pos] = <char>ch - writer.pos += 1 - return 0 - - -cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): - cdef uint64_t utf = <uint64_t> symbol - - if utf < 0x80: - return _write_byte(writer, <uint8_t>utf) - elif utf < 0x800: - if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0: - return -1 - return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f))) - elif 0xD800 <= utf <= 0xDFFF: - # surogate pair, ignored - return 0 - elif utf < 0x10000: - if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0: - return -1 - if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0: - return -1 - return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f))) - elif utf > 0x10FFFF: - # symbol is too large - return 0 - else: - if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0: - return -1 - if _write_byte(writer, - <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0: - return -1 - if _write_byte(writer, - <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0: - return -1 - return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f))) - - -cdef inline int _write_str(Writer* writer, str s): - cdef Py_UCS4 ch - for ch in s: - if _write_utf8(writer, ch) < 0: - return -1 - - -# --------------- _serialize_headers ---------------------- - -cdef str to_str(object s): - typ = type(s) - if typ is str: - return s - elif typ is _istr: - return PyObject_Str(s) - elif not isinstance(s, str): - raise TypeError("Cannot serialize non-str key {!r}".format(s)) - else: - return str(s) - - -def _serialize_headers(str status_line, headers): - cdef Writer writer - cdef object key - cdef object val - cdef bytes ret - - _init_writer(&writer) - - try: - if _write_str(&writer, status_line) < 0: - raise - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - for key, val in headers.items(): - if _write_str(&writer, to_str(key)) < 0: - raise - if _write_byte(&writer, b':') < 0: - raise - if _write_byte(&writer, b' ') < 0: - raise 
- if _write_str(&writer, to_str(val)) < 0: - raise - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - return PyBytes_FromStringAndSize(writer.buf, writer.pos) - finally: - _release_writer(&writer) diff --git a/third_party/python/aiohttp/aiohttp/_websocket.c b/third_party/python/aiohttp/aiohttp/_websocket.c deleted file mode 100644 index 4891c24a8413..000000000000 --- a/third_party/python/aiohttp/aiohttp/_websocket.c +++ /dev/null @@ -1,3588 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. -#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 
- #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT 
__restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include <stdint.h> -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef 
Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include <math.h> -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__aiohttp___websocket -#define __PYX_HAVE_API__aiohttp___websocket -/* Early includes */ -#include -#include -#include "pythread.h" -#include -#ifdef _OPENMP -#include <omp.h> -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include <cstdlib> - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define 
__Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "aiohttp/_websocket.pyx", - "type.pxd", - "bool.pxd", - "complex.pxd", -}; - -/*--- Type declarations ---*/ - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* 
__Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto -#define __PYX_HAVE_RT_ImportType_proto -enum __Pyx_ImportType_CheckSize { - __Pyx_ImportType_CheckSize_Error = 0, - __Pyx_ImportType_CheckSize_Warn = 1, - __Pyx_ImportType_CheckSize_Ignore = 2 -}; -static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); -#endif - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define 
__PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - - -/* Module declarations from 'cpython.version' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.type' */ -static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; - -/* Module declarations from 'libc.string' */ - -/* Module declarations from 'libc.stdio' */ - -/* Module declarations from 'cpython.object' */ - -/* Module declarations from 'cpython.ref' */ - -/* Module declarations from 'cpython.exc' */ - -/* Module declarations from 'cpython.module' */ - -/* Module declarations from 'cpython.mem' */ - -/* Module declarations from 'cpython.tuple' */ - -/* Module declarations from 'cpython.list' */ - -/* Module declarations from 'cpython.sequence' */ - -/* Module declarations from 'cpython.mapping' */ - -/* Module declarations from 'cpython.iterator' */ - -/* Module declarations from 'cpython.number' */ - -/* Module declarations from 'cpython.int' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.bool' */ -static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; - -/* Module declarations from 'cpython.long' */ - -/* Module declarations from 'cpython.float' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.complex' */ -static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; - -/* Module declarations from 'cpython.string' */ - -/* Module declarations from 'cpython.unicode' */ - -/* Module declarations from 'cpython.dict' */ - -/* Module declarations from 
'cpython.instance' */ - -/* Module declarations from 'cpython.function' */ - -/* Module declarations from 'cpython.method' */ - -/* Module declarations from 'cpython.weakref' */ - -/* Module declarations from 'cpython.getargs' */ - -/* Module declarations from 'cpython.pythread' */ - -/* Module declarations from 'cpython.pystate' */ - -/* Module declarations from 'cpython.cobject' */ - -/* Module declarations from 'cpython.oldbuffer' */ - -/* Module declarations from 'cpython.set' */ - -/* Module declarations from 'cpython.buffer' */ - -/* Module declarations from 'cpython.bytes' */ - -/* Module declarations from 'cpython.pycapsule' */ - -/* Module declarations from 'cpython' */ - -/* Module declarations from 'libc.stdint' */ - -/* Module declarations from 'aiohttp._websocket' */ -#define __Pyx_MODULE_NAME "aiohttp._websocket" -extern int __pyx_module_is_main_aiohttp___websocket; -int __pyx_module_is_main_aiohttp___websocket = 0; - -/* Implementation of 'aiohttp._websocket' */ -static PyObject *__pyx_builtin_range; -static const char __pyx_k_i[] = "i"; -static const char __pyx_k_data[] = "data"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_mask[] = "mask"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_range[] = "range"; -static const char __pyx_k_in_buf[] = "in_buf"; -static const char __pyx_k_data_len[] = "data_len"; -static const char __pyx_k_mask_buf[] = "mask_buf"; -static const char __pyx_k_uint32_msk[] = "uint32_msk"; -static const char __pyx_k_uint64_msk[] = "uint64_msk"; -static const char __pyx_k_aiohttp__websocket[] = "aiohttp._websocket"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_websocket_mask_cython[] = "_websocket_mask_cython"; -static const char __pyx_k_aiohttp__websocket_pyx[] = "aiohttp/_websocket.pyx"; -static PyObject *__pyx_n_s_aiohttp__websocket; -static PyObject *__pyx_kp_s_aiohttp__websocket_pyx; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_data; -static PyObject *__pyx_n_s_data_len; -static PyObject *__pyx_n_s_i; -static PyObject *__pyx_n_s_in_buf; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_mask; -static PyObject *__pyx_n_s_mask_buf; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_range; -static PyObject *__pyx_n_s_test; -static PyObject *__pyx_n_s_uint32_msk; -static PyObject *__pyx_n_s_uint64_msk; -static PyObject *__pyx_n_s_websocket_mask_cython; -static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data); /* proto */ -static PyObject *__pyx_tuple_; -static PyObject *__pyx_codeobj__2; -/* Late includes */ - -/* "aiohttp/_websocket.pyx":11 - * - * - * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< - * """Note, this function mutates its `data` argument - * """ - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython[] = "Note, this function mutates its `data` argument\n "; -static PyMethodDef __pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython = {"_websocket_mask_cython", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython, METH_VARARGS|METH_KEYWORDS, 
__pyx_doc_7aiohttp_10_websocket__websocket_mask_cython}; -static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_mask = 0; - PyObject *__pyx_v_data = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_websocket_mask_cython (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_mask,&__pyx_n_s_data,0}; - PyObject* values[2] = {0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_mask)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 11, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 11, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - } - __pyx_v_mask = values[0]; - __pyx_v_data = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 11, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(__pyx_self, __pyx_v_mask, __pyx_v_data); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data) { - Py_ssize_t __pyx_v_data_len; - Py_ssize_t __pyx_v_i; - unsigned char *__pyx_v_in_buf; - unsigned char const *__pyx_v_mask_buf; - uint32_t __pyx_v_uint32_msk; - uint64_t __pyx_v_uint64_msk; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - Py_ssize_t __pyx_t_1; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - char *__pyx_t_5; - uint64_t *__pyx_t_6; - long __pyx_t_7; - uint32_t *__pyx_t_8; - Py_ssize_t __pyx_t_9; - Py_ssize_t __pyx_t_10; - Py_ssize_t __pyx_t_11; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_websocket_mask_cython", 0); - __Pyx_INCREF(__pyx_v_mask); - __Pyx_INCREF(__pyx_v_data); - - /* "aiohttp/_websocket.pyx":22 - * uint64_t uint64_msk - * - * assert len(mask) == 4 # <<<<<<<<<<<<<< - * - * if not isinstance(mask, bytes): - */ - #ifndef CYTHON_WITHOUT_ASSERTIONS - if (unlikely(!Py_OptimizeFlag)) { - __pyx_t_1 = PyObject_Length(__pyx_v_mask); 
if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 22, __pyx_L1_error) - if (unlikely(!((__pyx_t_1 == 4) != 0))) { - PyErr_SetNone(PyExc_AssertionError); - __PYX_ERR(0, 22, __pyx_L1_error) - } - } - #endif - - /* "aiohttp/_websocket.pyx":24 - * assert len(mask) == 4 - * - * if not isinstance(mask, bytes): # <<<<<<<<<<<<<< - * mask = bytes(mask) - * - */ - __pyx_t_2 = PyBytes_Check(__pyx_v_mask); - __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); - if (__pyx_t_3) { - - /* "aiohttp/_websocket.pyx":25 - * - * if not isinstance(mask, bytes): - * mask = bytes(mask) # <<<<<<<<<<<<<< - * - * if isinstance(data, bytearray): - */ - __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_mask); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF_SET(__pyx_v_mask, __pyx_t_4); - __pyx_t_4 = 0; - - /* "aiohttp/_websocket.pyx":24 - * assert len(mask) == 4 - * - * if not isinstance(mask, bytes): # <<<<<<<<<<<<<< - * mask = bytes(mask) - * - */ - } - - /* "aiohttp/_websocket.pyx":27 - * mask = bytes(mask) - * - * if isinstance(data, bytearray): # <<<<<<<<<<<<<< - * data = data - * else: - */ - __pyx_t_3 = PyByteArray_Check(__pyx_v_data); - __pyx_t_2 = (__pyx_t_3 != 0); - if (__pyx_t_2) { - - /* "aiohttp/_websocket.pyx":28 - * - * if isinstance(data, bytearray): - * data = data # <<<<<<<<<<<<<< - * else: - * data = bytearray(data) - */ - __pyx_t_4 = __pyx_v_data; - __Pyx_INCREF(__pyx_t_4); - __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4); - __pyx_t_4 = 0; - - /* "aiohttp/_websocket.pyx":27 - * mask = bytes(mask) - * - * if isinstance(data, bytearray): # <<<<<<<<<<<<<< - * data = data - * else: - */ - goto __pyx_L4; - } - - /* "aiohttp/_websocket.pyx":30 - * data = data - * else: - * data = bytearray(data) # <<<<<<<<<<<<<< - * - * data_len = len(data) - */ - /*else*/ { - __pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_v_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 30, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4); - __pyx_t_4 = 0; - } - __pyx_L4:; - - /* "aiohttp/_websocket.pyx":32 - * data = bytearray(data) - * - * data_len = len(data) # <<<<<<<<<<<<<< - * in_buf = PyByteArray_AsString(data) - * mask_buf = PyBytes_AsString(mask) - */ - __pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 32, __pyx_L1_error) - __pyx_v_data_len = __pyx_t_1; - - /* "aiohttp/_websocket.pyx":33 - * - * data_len = len(data) - * in_buf = PyByteArray_AsString(data) # <<<<<<<<<<<<<< - * mask_buf = PyBytes_AsString(mask) - * uint32_msk = (mask_buf)[0] - */ - if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 33, __pyx_L1_error) - __pyx_t_5 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 33, __pyx_L1_error) - __pyx_v_in_buf = ((unsigned char *)__pyx_t_5); - - /* "aiohttp/_websocket.pyx":34 - * data_len = len(data) - * in_buf = PyByteArray_AsString(data) - * mask_buf = PyBytes_AsString(mask) # <<<<<<<<<<<<<< - * uint32_msk = (mask_buf)[0] - * - */ - __pyx_t_5 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 34, __pyx_L1_error) - __pyx_v_mask_buf = ((unsigned char const *)__pyx_t_5); - - /* "aiohttp/_websocket.pyx":35 - * in_buf = PyByteArray_AsString(data) - * mask_buf = PyBytes_AsString(mask) - * 
uint32_msk = (mask_buf)[0] # <<<<<<<<<<<<<< - * - * # TODO: align in_data ptr to achieve even faster speeds - */ - __pyx_v_uint32_msk = (((uint32_t *)__pyx_v_mask_buf)[0]); - - /* "aiohttp/_websocket.pyx":40 - * # does it need in python ?! malloc() always aligns to sizeof(long) bytes - * - * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< - * uint64_msk = uint32_msk - * uint64_msk = (uint64_msk << 32) | uint32_msk - */ - __pyx_t_2 = (((sizeof(size_t)) >= 8) != 0); - if (__pyx_t_2) { - - /* "aiohttp/_websocket.pyx":41 - * - * if sizeof(size_t) >= 8: - * uint64_msk = uint32_msk # <<<<<<<<<<<<<< - * uint64_msk = (uint64_msk << 32) | uint32_msk - * - */ - __pyx_v_uint64_msk = __pyx_v_uint32_msk; - - /* "aiohttp/_websocket.pyx":42 - * if sizeof(size_t) >= 8: - * uint64_msk = uint32_msk - * uint64_msk = (uint64_msk << 32) | uint32_msk # <<<<<<<<<<<<<< - * - * while data_len >= 8: - */ - __pyx_v_uint64_msk = ((__pyx_v_uint64_msk << 32) | __pyx_v_uint32_msk); - - /* "aiohttp/_websocket.pyx":44 - * uint64_msk = (uint64_msk << 32) | uint32_msk - * - * while data_len >= 8: # <<<<<<<<<<<<<< - * (in_buf)[0] ^= uint64_msk - * in_buf += 8 - */ - while (1) { - __pyx_t_2 = ((__pyx_v_data_len >= 8) != 0); - if (!__pyx_t_2) break; - - /* "aiohttp/_websocket.pyx":45 - * - * while data_len >= 8: - * (in_buf)[0] ^= uint64_msk # <<<<<<<<<<<<<< - * in_buf += 8 - * data_len -= 8 - */ - __pyx_t_6 = ((uint64_t *)__pyx_v_in_buf); - __pyx_t_7 = 0; - (__pyx_t_6[__pyx_t_7]) = ((__pyx_t_6[__pyx_t_7]) ^ __pyx_v_uint64_msk); - - /* "aiohttp/_websocket.pyx":46 - * while data_len >= 8: - * (in_buf)[0] ^= uint64_msk - * in_buf += 8 # <<<<<<<<<<<<<< - * data_len -= 8 - * - */ - __pyx_v_in_buf = (__pyx_v_in_buf + 8); - - /* "aiohttp/_websocket.pyx":47 - * (in_buf)[0] ^= uint64_msk - * in_buf += 8 - * data_len -= 8 # <<<<<<<<<<<<<< - * - * - */ - __pyx_v_data_len = (__pyx_v_data_len - 8); - } - - /* "aiohttp/_websocket.pyx":40 - * # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes - * - * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< - * uint64_msk = uint32_msk - * uint64_msk = (uint64_msk << 32) | uint32_msk - */ - } - - /* "aiohttp/_websocket.pyx":50 - * - * - * while data_len >= 4: # <<<<<<<<<<<<<< - * (in_buf)[0] ^= uint32_msk - * in_buf += 4 - */ - while (1) { - __pyx_t_2 = ((__pyx_v_data_len >= 4) != 0); - if (!__pyx_t_2) break; - - /* "aiohttp/_websocket.pyx":51 - * - * while data_len >= 4: - * (in_buf)[0] ^= uint32_msk # <<<<<<<<<<<<<< - * in_buf += 4 - * data_len -= 4 - */ - __pyx_t_8 = ((uint32_t *)__pyx_v_in_buf); - __pyx_t_7 = 0; - (__pyx_t_8[__pyx_t_7]) = ((__pyx_t_8[__pyx_t_7]) ^ __pyx_v_uint32_msk); - - /* "aiohttp/_websocket.pyx":52 - * while data_len >= 4: - * (in_buf)[0] ^= uint32_msk - * in_buf += 4 # <<<<<<<<<<<<<< - * data_len -= 4 - * - */ - __pyx_v_in_buf = (__pyx_v_in_buf + 4); - - /* "aiohttp/_websocket.pyx":53 - * (in_buf)[0] ^= uint32_msk - * in_buf += 4 - * data_len -= 4 # <<<<<<<<<<<<<< - * - * for i in range(0, data_len): - */ - __pyx_v_data_len = (__pyx_v_data_len - 4); - } - - /* "aiohttp/_websocket.pyx":55 - * data_len -= 4 - * - * for i in range(0, data_len): # <<<<<<<<<<<<<< - * in_buf[i] ^= mask_buf[i] - */ - __pyx_t_1 = __pyx_v_data_len; - __pyx_t_9 = __pyx_t_1; - for (__pyx_t_10 = 0; __pyx_t_10 < __pyx_t_9; __pyx_t_10+=1) { - __pyx_v_i = __pyx_t_10; - - /* "aiohttp/_websocket.pyx":56 - * - * for i in range(0, data_len): - * in_buf[i] ^= mask_buf[i] # <<<<<<<<<<<<<< - */ - __pyx_t_11 = __pyx_v_i; - (__pyx_v_in_buf[__pyx_t_11]) = ((__pyx_v_in_buf[__pyx_t_11]) ^ (__pyx_v_mask_buf[__pyx_v_i])); - } - - /* "aiohttp/_websocket.pyx":11 - * - * - * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< - * """Note, this function mutates its `data` argument - * """ - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_mask); - __Pyx_XDECREF(__pyx_v_data); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__websocket(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__websocket}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "_websocket", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_n_s_aiohttp__websocket, __pyx_k_aiohttp__websocket, sizeof(__pyx_k_aiohttp__websocket), 0, 0, 1, 1}, - 
{&__pyx_kp_s_aiohttp__websocket_pyx, __pyx_k_aiohttp__websocket_pyx, sizeof(__pyx_k_aiohttp__websocket_pyx), 0, 0, 1, 0}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1}, - {&__pyx_n_s_data_len, __pyx_k_data_len, sizeof(__pyx_k_data_len), 0, 0, 1, 1}, - {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, - {&__pyx_n_s_in_buf, __pyx_k_in_buf, sizeof(__pyx_k_in_buf), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_mask, __pyx_k_mask, sizeof(__pyx_k_mask), 0, 0, 1, 1}, - {&__pyx_n_s_mask_buf, __pyx_k_mask_buf, sizeof(__pyx_k_mask_buf), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_uint32_msk, __pyx_k_uint32_msk, sizeof(__pyx_k_uint32_msk), 0, 0, 1, 1}, - {&__pyx_n_s_uint64_msk, __pyx_k_uint64_msk, sizeof(__pyx_k_uint64_msk), 0, 0, 1, 1}, - {&__pyx_n_s_websocket_mask_cython, __pyx_k_websocket_mask_cython, sizeof(__pyx_k_websocket_mask_cython), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 55, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "aiohttp/_websocket.pyx":11 - * - * - * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< - * """Note, this function mutates its `data` argument - * """ - */ - __pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 11, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - 
__Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", - #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), - #else - sizeof(PyHeapTypeObject), - #endif - __Pyx_ImportType_CheckSize_Warn); - if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __Pyx_ImportType_CheckSize_Warn); - if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(2, 8, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __Pyx_ImportType_CheckSize_Warn); - if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(3, 15, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_websocket(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC 
init_websocket(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__websocket(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__websocket(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? -1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { - result = PyDict_SetItemString(moddict, to_name, value); - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__websocket(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_websocket' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__websocket(void)", 0); - if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? */ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_websocket", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_b); - __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_cython_runtime); - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_aiohttp___websocket) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "aiohttp._websocket")) { - if (unlikely(PyDict_SetItemString(modules, "aiohttp._websocket", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - (void)__Pyx_modinit_type_init_code(); - if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "aiohttp/_websocket.pyx":11 - * - * - * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< - * """Note, this function mutates its `data` argument - * """ - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "aiohttp/_websocket.pyx":1 - * from cpython cimport PyBytes_AsString # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - if (__pyx_m) { - if (__pyx_d) { - __Pyx_AddTraceback("init aiohttp._websocket", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - Py_CLEAR(__pyx_m); - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init aiohttp._websocket"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 
0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? 
"" : "s", num_found); -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = PyCFunction_GET_SELF(func); - int flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? 
PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, 
&arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -#endif - -/* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType -#define __PYX_HAVE_RT_ImportType -static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, - size_t size, enum __Pyx_ImportType_CheckSize check_size) -{ - PyObject *result = 0; - char warning[200]; - Py_ssize_t basicsize; -#ifdef Py_LIMITED_API - PyObject *py_basicsize; -#endif - result = PyObject_GetAttrString(module, class_name); - if (!result) - goto bad; - if (!PyType_Check(result)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%.200s is not a type object", - module_name, class_name); - goto bad; - } -#ifndef Py_LIMITED_API - basicsize = ((PyTypeObject *)result)->tp_basicsize; -#else - py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); - if (!py_basicsize) - goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; -#endif - if ((size_t)basicsize < size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { - PyOS_snprintf(warning, sizeof(warning), - "%s.%s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; - } - return (PyTypeObject *)result; -bad: - Py_XDECREF(result); - return NULL; -} -#endif - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = 
PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto 
raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: 
- if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = 
__Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } 
else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { 
- int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; ip) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - 
char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). 
" - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/third_party/python/aiohttp/aiohttp/_websocket.pyx b/third_party/python/aiohttp/aiohttp/_websocket.pyx deleted file mode 100644 index 94318d2b1bec..000000000000 --- a/third_party/python/aiohttp/aiohttp/_websocket.pyx +++ /dev/null @@ -1,56 +0,0 @@ -from cpython cimport PyBytes_AsString - - -#from cpython cimport PyByteArray_AsString # cython still not exports that -cdef extern from "Python.h": - char* PyByteArray_AsString(bytearray ba) except NULL - -from libc.stdint cimport uint32_t, uint64_t, uintmax_t - - -def _websocket_mask_cython(object mask, object data): - """Note, this function mutates its `data` argument - """ - cdef: - Py_ssize_t data_len, i - # bit operations on signed integers are implementation-specific - unsigned char * in_buf - const unsigned char * mask_buf - uint32_t uint32_msk - uint64_t uint64_msk - - assert len(mask) == 4 - - if not isinstance(mask, bytes): - mask = bytes(mask) - - if isinstance(data, bytearray): - data = <bytearray>data - else: - data = bytearray(data) - - data_len = len(data) - in_buf = <unsigned char*> PyByteArray_AsString(data) - mask_buf = <const unsigned char*> PyBytes_AsString(mask) - uint32_msk = (<uint32_t*>mask_buf)[0] - - # TODO: align in_data ptr to achieve even faster speeds - # does it need in python ?! malloc() always aligns to sizeof(long) bytes - - if sizeof(size_t) >= 8: - uint64_msk = uint32_msk - uint64_msk = (uint64_msk << 32) | uint32_msk - - while data_len >= 8: - (<uint64_t*>in_buf)[0] ^= uint64_msk - in_buf += 8 - data_len -= 8 - - - while data_len >= 4: - (<uint32_t*>in_buf)[0] ^= uint32_msk - in_buf += 4 - data_len -= 4 - - for i in range(0, data_len): - in_buf[i] ^= mask_buf[i] diff --git a/third_party/python/aiohttp/aiohttp/abc.py b/third_party/python/aiohttp/aiohttp/abc.py deleted file mode 100644 index 4abfd798d7d1..000000000000 --- a/third_party/python/aiohttp/aiohttp/abc.py +++ /dev/null @@ -1,200 +0,0 @@ -import asyncio -import logging -from abc import ABC, abstractmethod -from collections.abc import Sized -from http.cookies import BaseCookie, Morsel -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Dict, - Generator, - Iterable, - List, - Optional, - Tuple, -) - -from multidict import CIMultiDict -from yarl import URL - -from .helpers import get_running_loop -from .typedefs import LooseCookies - -if TYPE_CHECKING: # pragma: no cover - from .web_app import Application - from .web_exceptions import HTTPException - from .web_request import BaseRequest, Request - from .web_response import StreamResponse -else: - BaseRequest = Request = Application = StreamResponse = None - HTTPException = None - - -class AbstractRouter(ABC): - def __init__(self) -> None: - self._frozen = False - - def post_init(self, app: Application) -> None: - """Post init stage. - - Not an abstract method for sake of backward compatibility, - but if the router wants to be aware of the application - it can override this.
- """ - - @property - def frozen(self) -> bool: - return self._frozen - - def freeze(self) -> None: - """Freeze router.""" - self._frozen = True - - @abstractmethod - async def resolve(self, request: Request) -> "AbstractMatchInfo": - """Return MATCH_INFO for given request""" - - -class AbstractMatchInfo(ABC): - @property # pragma: no branch - @abstractmethod - def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: - """Execute matched request handler""" - - @property - @abstractmethod - def expect_handler(self) -> Callable[[Request], Awaitable[None]]: - """Expect handler for 100-continue processing""" - - @property # pragma: no branch - @abstractmethod - def http_exception(self) -> Optional[HTTPException]: - """HTTPException instance raised on router's resolving, or None""" - - @abstractmethod # pragma: no branch - def get_info(self) -> Dict[str, Any]: - """Return a dict with additional info useful for introspection""" - - @property # pragma: no branch - @abstractmethod - def apps(self) -> Tuple[Application, ...]: - """Stack of nested applications. - - Top level application is left-most element. - - """ - - @abstractmethod - def add_app(self, app: Application) -> None: - """Add application to the nested apps stack.""" - - @abstractmethod - def freeze(self) -> None: - """Freeze the match info. - - The method is called after route resolution. - - After the call .add_app() is forbidden. - - """ - - -class AbstractView(ABC): - """Abstract class based view.""" - - def __init__(self, request: Request) -> None: - self._request = request - - @property - def request(self) -> Request: - """Request instance.""" - return self._request - - @abstractmethod - def __await__(self) -> Generator[Any, None, StreamResponse]: - """Execute the view handler.""" - - -class AbstractResolver(ABC): - """Abstract DNS resolver.""" - - @abstractmethod - async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: - """Return IP address for given hostname""" - - @abstractmethod - async def close(self) -> None: - """Release resolver""" - - -if TYPE_CHECKING: # pragma: no cover - IterableBase = Iterable[Morsel[str]] -else: - IterableBase = Iterable - - -class AbstractCookieJar(Sized, IterableBase): - """Abstract Cookie Jar.""" - - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) - - @abstractmethod - def clear(self) -> None: - """Clear all cookies.""" - - @abstractmethod - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - """Update cookies.""" - - @abstractmethod - def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": - """Return the jar's cookies filtered by their attributes.""" - - -class AbstractStreamWriter(ABC): - """Abstract stream writer.""" - - buffer_size = 0 - output_size = 0 - length = 0 # type: Optional[int] - - @abstractmethod - async def write(self, chunk: bytes) -> None: - """Write chunk into stream.""" - - @abstractmethod - async def write_eof(self, chunk: bytes = b"") -> None: - """Write last chunk.""" - - @abstractmethod - async def drain(self) -> None: - """Flush the write buffer.""" - - @abstractmethod - def enable_compression(self, encoding: str = "deflate") -> None: - """Enable HTTP body compression""" - - @abstractmethod - def enable_chunking(self) -> None: - """Enable HTTP chunked mode""" - - @abstractmethod - async def write_headers( - self, status_line: str, headers: "CIMultiDict[str]" - ) -> None: - """Write HTTP headers""" - - -class 
AbstractAccessLogger(ABC): - """Abstract writer to access log.""" - - def __init__(self, logger: logging.Logger, log_format: str) -> None: - self.logger = logger - self.log_format = log_format - - @abstractmethod - def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - """Emit log to logger.""" diff --git a/third_party/python/aiohttp/aiohttp/base_protocol.py b/third_party/python/aiohttp/aiohttp/base_protocol.py deleted file mode 100644 index 01e18310b471..000000000000 --- a/third_party/python/aiohttp/aiohttp/base_protocol.py +++ /dev/null @@ -1,87 +0,0 @@ -import asyncio -from typing import Optional, cast - -from .tcp_helpers import tcp_nodelay - - -class BaseProtocol(asyncio.Protocol): - __slots__ = ( - "_loop", - "_paused", - "_drain_waiter", - "_connection_lost", - "_reading_paused", - "transport", - ) - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop # type: asyncio.AbstractEventLoop - self._paused = False - self._drain_waiter = None # type: Optional[asyncio.Future[None]] - self._connection_lost = False - self._reading_paused = False - - self.transport = None # type: Optional[asyncio.Transport] - - def pause_writing(self) -> None: - assert not self._paused - self._paused = True - - def resume_writing(self) -> None: - assert self._paused - self._paused = False - - waiter = self._drain_waiter - if waiter is not None: - self._drain_waiter = None - if not waiter.done(): - waiter.set_result(None) - - def pause_reading(self) -> None: - if not self._reading_paused and self.transport is not None: - try: - self.transport.pause_reading() - except (AttributeError, NotImplementedError, RuntimeError): - pass - self._reading_paused = True - - def resume_reading(self) -> None: - if self._reading_paused and self.transport is not None: - try: - self.transport.resume_reading() - except (AttributeError, NotImplementedError, RuntimeError): - pass - self._reading_paused = False - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - tr = cast(asyncio.Transport, transport) - tcp_nodelay(tr, True) - self.transport = tr - - def connection_lost(self, exc: Optional[BaseException]) -> None: - self._connection_lost = True - # Wake up the writer if currently paused. 
- self.transport = None - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - waiter.set_exception(exc) - - async def _drain_helper(self) -> None: - if self._connection_lost: - raise ConnectionResetError("Connection lost") - if not self._paused: - return - waiter = self._drain_waiter - assert waiter is None or waiter.cancelled() - waiter = self._loop.create_future() - self._drain_waiter = waiter - await waiter diff --git a/third_party/python/aiohttp/aiohttp/client.py b/third_party/python/aiohttp/aiohttp/client.py deleted file mode 100644 index a9da8e155d51..000000000000 --- a/third_party/python/aiohttp/aiohttp/client.py +++ /dev/null @@ -1,1275 +0,0 @@ -"""HTTP Client for asyncio.""" - -import asyncio -import base64 -import hashlib -import json -import os -import sys -import traceback -import warnings -from types import SimpleNamespace, TracebackType -from typing import ( - Any, - Awaitable, - Callable, - Coroutine, - FrozenSet, - Generator, - Generic, - Iterable, - List, - Mapping, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -import attr -from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr -from yarl import URL - -from . import hdrs, http, payload -from .abc import AbstractCookieJar -from .client_exceptions import ( - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientResponseError as ClientResponseError, - ClientSSLError as ClientSSLError, - ContentTypeError as ContentTypeError, - InvalidURL as InvalidURL, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TooManyRedirects as TooManyRedirects, - WSServerHandshakeError as WSServerHandshakeError, -) -from .client_reqrep import ( - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - Fingerprint as Fingerprint, - RequestInfo as RequestInfo, - _merge_ssl_params, -) -from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse -from .connector import ( - BaseConnector as BaseConnector, - NamedPipeConnector as NamedPipeConnector, - TCPConnector as TCPConnector, - UnixConnector as UnixConnector, -) -from .cookiejar import CookieJar -from .helpers import ( - DEBUG, - PY_36, - BasicAuth, - CeilTimeout, - TimeoutHandle, - get_running_loop, - proxies_from_env, - sentinel, - strip_auth_from_url, -) -from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter -from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse -from .streams import FlowControlDataQueue -from .tracing import Trace, TraceConfig -from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL - -__all__ = ( - # client_exceptions - "ClientConnectionError", - "ClientConnectorCertificateError", - "ClientConnectorError", - "ClientConnectorSSLError", - "ClientError", - "ClientHttpProxyError", - "ClientOSError", - "ClientPayloadError", - "ClientProxyConnectionError", - "ClientResponseError", - 
"ClientSSLError", - "ContentTypeError", - "InvalidURL", - "ServerConnectionError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ServerTimeoutError", - "TooManyRedirects", - "WSServerHandshakeError", - # client_reqrep - "ClientRequest", - "ClientResponse", - "Fingerprint", - "RequestInfo", - # connector - "BaseConnector", - "TCPConnector", - "UnixConnector", - "NamedPipeConnector", - # client_ws - "ClientWebSocketResponse", - # client - "ClientSession", - "ClientTimeout", - "request", -) - - -try: - from ssl import SSLContext -except ImportError: # pragma: no cover - SSLContext = object # type: ignore - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ClientTimeout: - total: Optional[float] = None - connect: Optional[float] = None - sock_read: Optional[float] = None - sock_connect: Optional[float] = None - - # pool_queue_timeout: Optional[float] = None - # dns_resolution_timeout: Optional[float] = None - # socket_connect_timeout: Optional[float] = None - # connection_acquiring_timeout: Optional[float] = None - # new_connection_timeout: Optional[float] = None - # http_header_timeout: Optional[float] = None - # response_body_timeout: Optional[float] = None - - # to create a timeout specific for a single request, either - # - create a completely new one to overwrite the default - # - or use http://www.attrs.org/en/stable/api.html#attr.evolve - # to overwrite the defaults - - -# 5 Minute default read timeout -DEFAULT_TIMEOUT = ClientTimeout(total=5 * 60) - -_RetType = TypeVar("_RetType") - - -class ClientSession: - """First-class interface for making HTTP requests.""" - - ATTRS = frozenset( - [ - "_source_traceback", - "_connector", - "requote_redirect_url", - "_loop", - "_cookie_jar", - "_connector_owner", - "_default_auth", - "_version", - "_json_serialize", - "_requote_redirect_url", - "_timeout", - "_raise_for_status", - "_auto_decompress", - "_trust_env", - "_default_headers", - "_skip_auto_headers", - "_request_class", - "_response_class", - "_ws_response_class", - "_trace_configs", - "_read_bufsize", - ] - ) - - _source_traceback = None - - def __init__( - self, - *, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - json_serialize: JSONEncoder = json.dumps, - request_class: Type[ClientRequest] = ClientRequest, - response_class: Type[ClientResponse] = ClientResponse, - ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, - version: HttpVersion = http.HttpVersion11, - cookie_jar: Optional[AbstractCookieJar] = None, - connector_owner: bool = True, - raise_for_status: bool = False, - read_timeout: Union[float, object] = sentinel, - conn_timeout: Optional[float] = None, - timeout: Union[object, ClientTimeout] = sentinel, - auto_decompress: bool = True, - trust_env: bool = False, - requote_redirect_url: bool = True, - trace_configs: Optional[List[TraceConfig]] = None, - read_bufsize: int = 2 ** 16, - ) -> None: - - if loop is None: - if connector is not None: - loop = connector._loop - - loop = get_running_loop(loop) - - if connector is None: - connector = TCPConnector(loop=loop) - - if connector._loop is not loop: - raise RuntimeError("Session and connector has to use same event loop") - - self._loop = loop - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - if cookie_jar is 
None: - cookie_jar = CookieJar(loop=loop) - self._cookie_jar = cookie_jar - - if cookies is not None: - self._cookie_jar.update_cookies(cookies) - - self._connector = connector # type: Optional[BaseConnector] - self._connector_owner = connector_owner - self._default_auth = auth - self._version = version - self._json_serialize = json_serialize - if timeout is sentinel: - self._timeout = DEFAULT_TIMEOUT - if read_timeout is not sentinel: - warnings.warn( - "read_timeout is deprecated, " "use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - self._timeout = attr.evolve(self._timeout, total=read_timeout) - if conn_timeout is not None: - self._timeout = attr.evolve(self._timeout, connect=conn_timeout) - warnings.warn( - "conn_timeout is deprecated, " "use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - else: - self._timeout = timeout # type: ignore - if read_timeout is not sentinel: - raise ValueError( - "read_timeout and timeout parameters " - "conflict, please setup " - "timeout.read" - ) - if conn_timeout is not None: - raise ValueError( - "conn_timeout and timeout parameters " - "conflict, please setup " - "timeout.connect" - ) - self._raise_for_status = raise_for_status - self._auto_decompress = auto_decompress - self._trust_env = trust_env - self._requote_redirect_url = requote_redirect_url - self._read_bufsize = read_bufsize - - # Convert to list of tuples - if headers: - real_headers = CIMultiDict(headers) # type: CIMultiDict[str] - else: - real_headers = CIMultiDict() - self._default_headers = real_headers # type: CIMultiDict[str] - if skip_auto_headers is not None: - self._skip_auto_headers = frozenset([istr(i) for i in skip_auto_headers]) - else: - self._skip_auto_headers = frozenset() - - self._request_class = request_class - self._response_class = response_class - self._ws_response_class = ws_response_class - - self._trace_configs = trace_configs or [] - for trace_config in self._trace_configs: - trace_config.freeze() - - def __init_subclass__(cls: Type["ClientSession"]) -> None: - warnings.warn( - "Inheritance class {} from ClientSession " - "is discouraged".format(cls.__name__), - DeprecationWarning, - stacklevel=2, - ) - - if DEBUG: - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom ClientSession.{} attribute " - "is discouraged".format(name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - def __del__(self, _warnings: Any = warnings) -> None: - if not self.closed: - if PY_36: - kwargs = {"source": self} - else: - kwargs = {} - _warnings.warn( - f"Unclosed client session {self!r}", ResourceWarning, **kwargs - ) - context = {"client_session": self, "message": "Unclosed client session"} - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def request( - self, method: str, url: StrOrURL, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP request.""" - return _RequestContextManager(self._request(method, url, **kwargs)) - - async def _request( - self, - method: str, - str_or_url: StrOrURL, - *, - params: Optional[Mapping[str, str]] = None, - data: Any = None, - json: Any = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Optional[str] = 
None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Optional[bool] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, object] = sentinel, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None, - proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[SimpleNamespace] = None, - read_bufsize: Optional[int] = None, - ) -> ClientResponse: - - # NOTE: timeout clamps existing connect and read timeouts. We cannot - # set the default to None because we need to detect if the user wants - # to use the existing timeouts by setting timeout to None. - - if self.closed: - raise RuntimeError("Session is closed") - - ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - - if data is not None and json is not None: - raise ValueError( - "data and json parameters can not be used at the same time" - ) - elif json is not None: - data = payload.JsonPayload(json, dumps=self._json_serialize) - - if not isinstance(chunked, bool) and chunked is not None: - warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) - - redirects = 0 - history = [] - version = self._version - - # Merge with default headers and transform to CIMultiDict - headers = self._prepare_headers(headers) - proxy_headers = self._prepare_headers(proxy_headers) - - try: - url = URL(str_or_url) - except ValueError as e: - raise InvalidURL(str_or_url) from e - - skip_headers = set(self._skip_auto_headers) - if skip_auto_headers is not None: - for i in skip_auto_headers: - skip_headers.add(istr(i)) - - if proxy is not None: - try: - proxy = URL(proxy) - except ValueError as e: - raise InvalidURL(proxy) from e - - if timeout is sentinel: - real_timeout = self._timeout # type: ClientTimeout - else: - if not isinstance(timeout, ClientTimeout): - real_timeout = ClientTimeout(total=timeout) # type: ignore - else: - real_timeout = timeout - # timeout is cumulative for all request operations - # (request, redirects, responses, data consuming) - tm = TimeoutHandle(self._loop, real_timeout.total) - handle = tm.start() - - if read_bufsize is None: - read_bufsize = self._read_bufsize - - traces = [ - Trace( - self, - trace_config, - trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), - ) - for trace_config in self._trace_configs - ] - - for trace in traces: - await trace.send_request_start(method, url, headers) - - timer = tm.timer() - try: - with timer: - while True: - url, auth_from_url = strip_auth_from_url(url) - if auth and auth_from_url: - raise ValueError( - "Cannot combine AUTH argument with " - "credentials encoded in URL" - ) - - if auth is None: - auth = auth_from_url - if auth is None: - auth = self._default_auth - # It would be confusing if we support explicit - # Authorization header with auth argument - if ( - headers is not None - and auth is not None - and hdrs.AUTHORIZATION in headers - ): - raise ValueError( - "Cannot combine AUTHORIZATION header " - "with AUTH argument or credentials " - "encoded in URL" - ) - - all_cookies = self._cookie_jar.filter_cookies(url) - - if cookies is not None: - tmp_cookie_jar = CookieJar() - tmp_cookie_jar.update_cookies(cookies) - req_cookies = tmp_cookie_jar.filter_cookies(url) - if req_cookies: - all_cookies.load(req_cookies) - - if proxy is not None: - proxy = URL(proxy) - elif 
self._trust_env: - for scheme, proxy_info in proxies_from_env().items(): - if scheme == url.scheme: - proxy = proxy_info.proxy - proxy_auth = proxy_info.proxy_auth - break - - req = self._request_class( - method, - url, - params=params, - headers=headers, - skip_auto_headers=skip_headers, - data=data, - cookies=all_cookies, - auth=auth, - version=version, - compress=compress, - chunked=chunked, - expect100=expect100, - loop=self._loop, - response_class=self._response_class, - proxy=proxy, - proxy_auth=proxy_auth, - timer=timer, - session=self, - ssl=ssl, - proxy_headers=proxy_headers, - traces=traces, - ) - - # connection timeout - try: - with CeilTimeout(real_timeout.connect, loop=self._loop): - assert self._connector is not None - conn = await self._connector.connect( - req, traces=traces, timeout=real_timeout - ) - except asyncio.TimeoutError as exc: - raise ServerTimeoutError( - "Connection timeout " "to host {}".format(url) - ) from exc - - assert conn.transport is not None - - assert conn.protocol is not None - conn.protocol.set_response_params( - timer=timer, - skip_payload=method.upper() == "HEAD", - read_until_eof=read_until_eof, - auto_decompress=self._auto_decompress, - read_timeout=real_timeout.sock_read, - read_bufsize=read_bufsize, - ) - - try: - try: - resp = await req.send(conn) - try: - await resp.start(conn) - except BaseException: - resp.close() - raise - except BaseException: - conn.close() - raise - except ClientError: - raise - except OSError as exc: - raise ClientOSError(*exc.args) from exc - - self._cookie_jar.update_cookies(resp.cookies, resp.url) - - # redirects - if resp.status in (301, 302, 303, 307, 308) and allow_redirects: - - for trace in traces: - await trace.send_request_redirect( - method, url, headers, resp - ) - - redirects += 1 - history.append(resp) - if max_redirects and redirects >= max_redirects: - resp.close() - raise TooManyRedirects( - history[0].request_info, tuple(history) - ) - - # For 301 and 302, mimic IE, now changed in RFC - # https://github.com/kennethreitz/requests/pull/269 - if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( - resp.status in (301, 302) and resp.method == hdrs.METH_POST - ): - method = hdrs.METH_GET - data = None - if headers.get(hdrs.CONTENT_LENGTH): - headers.pop(hdrs.CONTENT_LENGTH) - - r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( - hdrs.URI - ) - if r_url is None: - # see github.com/aio-libs/aiohttp/issues/2022 - break - else: - # reading from correct redirection - # response is forbidden - resp.release() - - try: - parsed_url = URL( - r_url, encoded=not self._requote_redirect_url - ) - - except ValueError as e: - raise InvalidURL(r_url) from e - - scheme = parsed_url.scheme - if scheme not in ("http", "https", ""): - resp.close() - raise ValueError("Can redirect only to http or https") - elif not scheme: - parsed_url = url.join(parsed_url) - - if url.origin() != parsed_url.origin(): - auth = None - headers.pop(hdrs.AUTHORIZATION, None) - - url = parsed_url - params = None - resp.release() - continue - - break - - # check response status - if raise_for_status is None: - raise_for_status = self._raise_for_status - if raise_for_status: - resp.raise_for_status() - - # register connection - if handle is not None: - if resp.connection is not None: - resp.connection.add_callback(handle.cancel) - else: - handle.cancel() - - resp._history = tuple(history) - - for trace in traces: - await trace.send_request_end(method, url, headers, resp) - return resp - - except BaseException as e: - # cleanup 
timer - tm.close() - if handle: - handle.cancel() - handle = None - - for trace in traces: - await trace.send_request_exception(method, url, headers, e) - raise - - def ws_connect( - self, - url: StrOrURL, - *, - method: str = hdrs.METH_GET, - protocols: Iterable[str] = (), - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, None, Fingerprint] = None, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - proxy_headers: Optional[LooseHeaders] = None, - compress: int = 0, - max_msg_size: int = 4 * 1024 * 1024, - ) -> "_WSRequestContextManager": - """Initiate websocket connection.""" - return _WSRequestContextManager( - self._ws_connect( - url, - method=method, - protocols=protocols, - timeout=timeout, - receive_timeout=receive_timeout, - autoclose=autoclose, - autoping=autoping, - heartbeat=heartbeat, - auth=auth, - origin=origin, - headers=headers, - proxy=proxy, - proxy_auth=proxy_auth, - ssl=ssl, - verify_ssl=verify_ssl, - fingerprint=fingerprint, - ssl_context=ssl_context, - proxy_headers=proxy_headers, - compress=compress, - max_msg_size=max_msg_size, - ) - ) - - async def _ws_connect( - self, - url: StrOrURL, - *, - method: str = hdrs.METH_GET, - protocols: Iterable[str] = (), - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, None, Fingerprint] = None, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - proxy_headers: Optional[LooseHeaders] = None, - compress: int = 0, - max_msg_size: int = 4 * 1024 * 1024, - ) -> ClientWebSocketResponse: - - if headers is None: - real_headers = CIMultiDict() # type: CIMultiDict[str] - else: - real_headers = CIMultiDict(headers) - - default_headers = { - hdrs.UPGRADE: "websocket", - hdrs.CONNECTION: "upgrade", - hdrs.SEC_WEBSOCKET_VERSION: "13", - } - - for key, value in default_headers.items(): - real_headers.setdefault(key, value) - - sec_key = base64.b64encode(os.urandom(16)) - real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() - - if protocols: - real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) - if origin is not None: - real_headers[hdrs.ORIGIN] = origin - if compress: - extstr = ws_ext_gen(compress=compress) - real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr - - ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - - # send request - resp = await self.request( - method, - url, - headers=real_headers, - read_until_eof=False, - auth=auth, - proxy=proxy, - proxy_auth=proxy_auth, - ssl=ssl, - proxy_headers=proxy_headers, - ) - - try: - # check handshake - if resp.status != 101: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid response status", - status=resp.status, - headers=resp.headers, - ) - - if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": - raise WSServerHandshakeError( - 
resp.request_info, - resp.history, - message="Invalid upgrade header", - status=resp.status, - headers=resp.headers, - ) - - if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid connection header", - status=resp.status, - headers=resp.headers, - ) - - # key calculation - r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") - match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() - if r_key != match: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid challenge response", - status=resp.status, - headers=resp.headers, - ) - - # websocket protocol - protocol = None - if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: - resp_protocols = [ - proto.strip() - for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") - ] - - for proto in resp_protocols: - if proto in protocols: - protocol = proto - break - - # websocket compress - notakeover = False - if compress: - compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) - if compress_hdrs: - try: - compress, notakeover = ws_ext_parse(compress_hdrs) - except WSHandshakeError as exc: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message=exc.args[0], - status=resp.status, - headers=resp.headers, - ) from exc - else: - compress = 0 - notakeover = False - - conn = resp.connection - assert conn is not None - conn_proto = conn.protocol - assert conn_proto is not None - transport = conn.transport - assert transport is not None - reader = FlowControlDataQueue( - conn_proto, 2 ** 16, loop=self._loop - ) # type: FlowControlDataQueue[WSMessage] - conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) - writer = WebSocketWriter( - conn_proto, - transport, - use_mask=True, - compress=compress, - notakeover=notakeover, - ) - except BaseException: - resp.close() - raise - else: - return self._ws_response_class( - reader, - writer, - protocol, - resp, - timeout, - autoclose, - autoping, - self._loop, - receive_timeout=receive_timeout, - heartbeat=heartbeat, - compress=compress, - client_notakeover=notakeover, - ) - - def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": - """Add default headers and transform it to CIMultiDict""" - # Convert headers to MultiDict - result = CIMultiDict(self._default_headers) - if headers: - if not isinstance(headers, (MultiDictProxy, MultiDict)): - headers = CIMultiDict(headers) - added_names = set() # type: Set[str] - for key, value in headers.items(): - if key in added_names: - result.add(key, value) - else: - result[key] = value - added_names.add(key) - return result - - def get( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP GET request.""" - return _RequestContextManager( - self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) - ) - - def options( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP OPTIONS request.""" - return _RequestContextManager( - self._request( - hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - def head( - self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP HEAD request.""" - return _RequestContextManager( - self._request( - hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - 
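The websocket handshake in _ws_connect above verifies the server's Sec-WebSocket-Accept header by hashing the client key together with WS_KEY (the RFC 6455 GUID). A minimal standalone sketch of that check, assuming only the standard GUID value (not the vendored code itself)::

    import base64
    import hashlib
    import os

    RFC6455_GUID = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"  # value aiohttp keeps in WS_KEY

    def expected_accept(sec_key: bytes) -> str:
        # The Sec-WebSocket-Accept value a compliant server must echo back.
        return base64.b64encode(hashlib.sha1(sec_key + RFC6455_GUID).digest()).decode()

    client_key = base64.b64encode(os.urandom(16))  # sent as Sec-WebSocket-Key
    print(expected_accept(client_key))             # compared against r_key in the code above

The handshake above fails with WSServerHandshakeError whenever the server's value differs from this computation.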
def post( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP POST request.""" - return _RequestContextManager( - self._request(hdrs.METH_POST, url, data=data, **kwargs) - ) - - def put( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PUT request.""" - return _RequestContextManager( - self._request(hdrs.METH_PUT, url, data=data, **kwargs) - ) - - def patch( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_PATCH, url, data=data, **kwargs) - ) - - def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": - """Perform HTTP DELETE request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) - - async def close(self) -> None: - """Close underlying connector. - - Release all acquired resources. - """ - if not self.closed: - if self._connector is not None and self._connector_owner: - await self._connector.close() - self._connector = None - - @property - def closed(self) -> bool: - """Is client session closed. - - A readonly property. - """ - return self._connector is None or self._connector.closed - - @property - def connector(self) -> Optional[BaseConnector]: - """Connector instance used for the session.""" - return self._connector - - @property - def cookie_jar(self) -> AbstractCookieJar: - """The session cookies.""" - return self._cookie_jar - - @property - def version(self) -> Tuple[int, int]: - """The session HTTP protocol version.""" - return self._version - - @property - def requote_redirect_url(self) -> bool: - """Do URL requoting on redirection handling.""" - return self._requote_redirect_url - - @requote_redirect_url.setter - def requote_redirect_url(self, val: bool) -> None: - """Do URL requoting on redirection handling.""" - warnings.warn( - "session.requote_redirect_url modification " "is deprecated #2778", - DeprecationWarning, - stacklevel=2, - ) - self._requote_redirect_url = val - - @property - def loop(self) -> asyncio.AbstractEventLoop: - """Session's loop.""" - warnings.warn( - "client.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - @property - def timeout(self) -> Union[object, ClientTimeout]: - """Timeout for the session.""" - return self._timeout - - @property - def headers(self) -> "CIMultiDict[str]": - """The default headers of the client session.""" - return self._default_headers - - @property - def skip_auto_headers(self) -> FrozenSet[istr]: - """Headers for which autogeneration should be skipped""" - return self._skip_auto_headers - - @property - def auth(self) -> Optional[BasicAuth]: - """An object that represents HTTP Basic Authorization""" - return self._default_auth - - @property - def json_serialize(self) -> JSONEncoder: - """Json serializer callable""" - return self._json_serialize - - @property - def connector_owner(self) -> bool: - """Should connector be closed on session closing""" - return self._connector_owner - - @property - def raise_for_status( - self, - ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: - """ - Should `ClientResponse.raise_for_status()` - be called for each response - """ - return self._raise_for_status - - @property - def auto_decompress(self) -> bool: - """Should the body response be automatically decompressed""" - return self._auto_decompress - - @property - def trust_env(self) -> 
bool: - """ - Should get proxies information - from HTTP_PROXY / HTTPS_PROXY environment variables - or ~/.netrc file if present - """ - return self._trust_env - - @property - def trace_configs(self) -> List[TraceConfig]: - """A list of TraceConfig instances used for client tracing""" - return self._trace_configs - - def detach(self) -> None: - """Detach connector from session without closing the former. - - Session is switched to closed state anyway. - """ - self._connector = None - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "ClientSession": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - await self.close() - - -class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): - - __slots__ = ("_coro", "_resp") - - def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro = coro - - def send(self, arg: None) -> "asyncio.Future[Any]": - return self._coro.send(arg) - - def throw(self, arg: BaseException) -> None: # type: ignore - self._coro.throw(arg) - - def close(self) -> None: - return self._coro.close() - - def __await__(self) -> Generator[Any, None, _RetType]: - ret = self._coro.__await__() - return ret - - def __iter__(self) -> Generator[Any, None, _RetType]: - return self.__await__() - - async def __aenter__(self) -> _RetType: - self._resp = await self._coro - return self._resp - - -class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - # We're basing behavior on the exception as it can be caused by - # user code unrelated to the status of the connection. If you - # would like to close a connection you must do that - # explicitly. Otherwise connection error handling should kick in - # and close/recycle the connection as required. 
- self._resp.release() - - -class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self._resp.close() - - -class _SessionRequestContextManager: - - __slots__ = ("_coro", "_resp", "_session") - - def __init__( - self, - coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], - session: ClientSession, - ) -> None: - self._coro = coro - self._resp = None # type: Optional[ClientResponse] - self._session = session - - async def __aenter__(self) -> ClientResponse: - try: - self._resp = await self._coro - except BaseException: - await self._session.close() - raise - else: - return self._resp - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - assert self._resp is not None - self._resp.close() - await self._session.close() - - -def request( - method: str, - url: StrOrURL, - *, - params: Optional[Mapping[str, str]] = None, - data: Any = None, - json: Any = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Optional[bool] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, object] = sentinel, - cookies: Optional[LooseCookies] = None, - version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - read_bufsize: Optional[int] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> _SessionRequestContextManager: - """Constructs and sends a request. Returns response object. - method - HTTP method - url - request url - params - (optional) Dictionary or bytes to be sent in the query - string of the new request - data - (optional) Dictionary, bytes, or file-like object to - send in the body of the request - json - (optional) Any json compatible python object - headers - (optional) Dictionary of HTTP Headers to send with - the request - cookies - (optional) Dict object to send with the request - auth - (optional) BasicAuth named tuple represent HTTP Basic Auth - auth - aiohttp.helpers.BasicAuth - allow_redirects - (optional) If set to False, do not follow - redirects - version - Request HTTP version. - compress - Set to True if request has to be compressed - with deflate encoding. - chunked - Set to chunk size for chunked transfer encoding. - expect100 - Expect 100-continue response from server. - connector - BaseConnector sub-class instance to support - connection pooling. - read_until_eof - Read response until eof if response - does not have Content-Length header. - loop - Optional event loop. - timeout - Optional ClientTimeout settings structure, 5min - total timeout by default. 
- Usage:: - >>> import aiohttp - >>> resp = await aiohttp.request('GET', 'http://python.org/') - >>> resp - - >>> data = await resp.read() - """ - connector_owner = False - if connector is None: - connector_owner = True - connector = TCPConnector(loop=loop, force_close=True) - - session = ClientSession( - loop=loop, - cookies=cookies, - version=version, - timeout=timeout, - connector=connector, - connector_owner=connector_owner, - ) - - return _SessionRequestContextManager( - session._request( - method, - url, - params=params, - data=data, - json=json, - headers=headers, - skip_auto_headers=skip_auto_headers, - auth=auth, - allow_redirects=allow_redirects, - max_redirects=max_redirects, - compress=compress, - chunked=chunked, - expect100=expect100, - raise_for_status=raise_for_status, - read_until_eof=read_until_eof, - proxy=proxy, - proxy_auth=proxy_auth, - read_bufsize=read_bufsize, - ), - session, - ) diff --git a/third_party/python/aiohttp/aiohttp/client_exceptions.py b/third_party/python/aiohttp/aiohttp/client_exceptions.py deleted file mode 100644 index f4be3bfb5e2d..000000000000 --- a/third_party/python/aiohttp/aiohttp/client_exceptions.py +++ /dev/null @@ -1,317 +0,0 @@ -"""HTTP related errors.""" - -import asyncio -import warnings -from typing import TYPE_CHECKING, Any, Optional, Tuple, Union - -from .typedefs import LooseHeaders - -try: - import ssl - - SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = SSLContext = None # type: ignore - - -if TYPE_CHECKING: # pragma: no cover - from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo -else: - RequestInfo = ClientResponse = ConnectionKey = None - -__all__ = ( - "ClientError", - "ClientConnectionError", - "ClientOSError", - "ClientConnectorError", - "ClientProxyConnectionError", - "ClientSSLError", - "ClientConnectorSSLError", - "ClientConnectorCertificateError", - "ServerConnectionError", - "ServerTimeoutError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ClientResponseError", - "ClientHttpProxyError", - "WSServerHandshakeError", - "ContentTypeError", - "ClientPayloadError", - "InvalidURL", -) - - -class ClientError(Exception): - """Base class for client connection errors.""" - - -class ClientResponseError(ClientError): - """Connection error during reading response. 
- - request_info: instance of RequestInfo - """ - - def __init__( - self, - request_info: RequestInfo, - history: Tuple[ClientResponse, ...], - *, - code: Optional[int] = None, - status: Optional[int] = None, - message: str = "", - headers: Optional[LooseHeaders] = None, - ) -> None: - self.request_info = request_info - if code is not None: - if status is not None: - raise ValueError( - "Both code and status arguments are provided; " - "code is deprecated, use status instead" - ) - warnings.warn( - "code argument is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - if status is not None: - self.status = status - elif code is not None: - self.status = code - else: - self.status = 0 - self.message = message - self.headers = headers - self.history = history - self.args = (request_info, history) - - def __str__(self) -> str: - return "{}, message={!r}, url={!r}".format( - self.status, - self.message, - self.request_info.real_url, - ) - - def __repr__(self) -> str: - args = f"{self.request_info!r}, {self.history!r}" - if self.status != 0: - args += f", status={self.status!r}" - if self.message != "": - args += f", message={self.message!r}" - if self.headers is not None: - args += f", headers={self.headers!r}" - return "{}({})".format(type(self).__name__, args) - - @property - def code(self) -> int: - warnings.warn( - "code property is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - return self.status - - @code.setter - def code(self, value: int) -> None: - warnings.warn( - "code property is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - self.status = value - - -class ContentTypeError(ClientResponseError): - """ContentType found is not valid.""" - - -class WSServerHandshakeError(ClientResponseError): - """websocket server handshake error.""" - - -class ClientHttpProxyError(ClientResponseError): - """HTTP proxy error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - proxy responds with status other than ``200 OK`` - on ``CONNECT`` request. - """ - - -class TooManyRedirects(ClientResponseError): - """Client was redirected too many times.""" - - -class ClientConnectionError(ClientError): - """Base class for client socket errors.""" - - -class ClientOSError(ClientConnectionError, OSError): - """OSError error.""" - - -class ClientConnectorError(ClientOSError): - """Client connector error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - connection to proxy can not be established. - """ - - def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: - self._conn_key = connection_key - self._os_error = os_error - super().__init__(os_error.errno, os_error.strerror) - self.args = (connection_key, os_error) - - @property - def os_error(self) -> OSError: - return self._os_error - - @property - def host(self) -> str: - return self._conn_key.host - - @property - def port(self) -> Optional[int]: - return self._conn_key.port - - @property - def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]: - return self._conn_key.ssl - - def __str__(self) -> str: - return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( - self, self.ssl if self.ssl is not None else "default", self.strerror - ) - - # OSError.__reduce__ does too much black magick - __reduce__ = BaseException.__reduce__ - - -class ClientProxyConnectionError(ClientConnectorError): - """Proxy connection error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - connection to proxy can not be established. 
- """ - - -class ServerConnectionError(ClientConnectionError): - """Server connection errors.""" - - -class ServerDisconnectedError(ServerConnectionError): - """Server disconnected.""" - - def __init__(self, message: Optional[str] = None) -> None: - if message is None: - message = "Server disconnected" - - self.args = (message,) - self.message = message - - -class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): - """Server timeout error.""" - - -class ServerFingerprintMismatch(ServerConnectionError): - """SSL certificate does not match expected fingerprint.""" - - def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: - self.expected = expected - self.got = got - self.host = host - self.port = port - self.args = (expected, got, host, port) - - def __repr__(self) -> str: - return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( - self.__class__.__name__, self.expected, self.got, self.host, self.port - ) - - -class ClientPayloadError(ClientError): - """Response payload error.""" - - -class InvalidURL(ClientError, ValueError): - """Invalid URL. - - URL used for fetching is malformed, e.g. it doesn't contains host - part.""" - - # Derive from ValueError for backward compatibility - - def __init__(self, url: Any) -> None: - # The type of url is not yarl.URL because the exception can be raised - # on URL(url) call - super().__init__(url) - - @property - def url(self) -> Any: - return self.args[0] - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.url}>" - - -class ClientSSLError(ClientConnectorError): - """Base error for ssl.*Errors.""" - - -if ssl is not None: - cert_errors = (ssl.CertificateError,) - cert_errors_bases = ( - ClientSSLError, - ssl.CertificateError, - ) - - ssl_errors = (ssl.SSLError,) - ssl_error_bases = (ClientSSLError, ssl.SSLError) -else: # pragma: no cover - cert_errors = tuple() - cert_errors_bases = ( - ClientSSLError, - ValueError, - ) - - ssl_errors = tuple() - ssl_error_bases = (ClientSSLError,) - - -class ClientConnectorSSLError(*ssl_error_bases): # type: ignore - """Response ssl error.""" - - -class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore - """Response certificate error.""" - - def __init__( - self, connection_key: ConnectionKey, certificate_error: Exception - ) -> None: - self._conn_key = connection_key - self._certificate_error = certificate_error - self.args = (connection_key, certificate_error) - - @property - def certificate_error(self) -> Exception: - return self._certificate_error - - @property - def host(self) -> str: - return self._conn_key.host - - @property - def port(self) -> Optional[int]: - return self._conn_key.port - - @property - def ssl(self) -> bool: - return self._conn_key.is_ssl - - def __str__(self) -> str: - return ( - "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " - "[{0.certificate_error.__class__.__name__}: " - "{0.certificate_error.args}]".format(self) - ) diff --git a/third_party/python/aiohttp/aiohttp/client_proto.py b/third_party/python/aiohttp/aiohttp/client_proto.py deleted file mode 100644 index 2973342e440b..000000000000 --- a/third_party/python/aiohttp/aiohttp/client_proto.py +++ /dev/null @@ -1,251 +0,0 @@ -import asyncio -from contextlib import suppress -from typing import Any, Optional, Tuple - -from .base_protocol import BaseProtocol -from .client_exceptions import ( - ClientOSError, - ClientPayloadError, - ServerDisconnectedError, - ServerTimeoutError, -) -from .helpers import BaseTimerContext -from .http import 
HttpResponseParser, RawResponseMessage -from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader - - -class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): - """Helper class to adapt between Protocol and StreamReader.""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - BaseProtocol.__init__(self, loop=loop) - DataQueue.__init__(self, loop) - - self._should_close = False - - self._payload = None - self._skip_payload = False - self._payload_parser = None - - self._timer = None - - self._tail = b"" - self._upgraded = False - self._parser = None # type: Optional[HttpResponseParser] - - self._read_timeout = None # type: Optional[float] - self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle] - - @property - def upgraded(self) -> bool: - return self._upgraded - - @property - def should_close(self) -> bool: - if self._payload is not None and not self._payload.is_eof() or self._upgraded: - return True - - return ( - self._should_close - or self._upgraded - or self.exception() is not None - or self._payload_parser is not None - or len(self) > 0 - or bool(self._tail) - ) - - def force_close(self) -> None: - self._should_close = True - - def close(self) -> None: - transport = self.transport - if transport is not None: - transport.close() - self.transport = None - self._payload = None - self._drop_timeout() - - def is_connected(self) -> bool: - return self.transport is not None and not self.transport.is_closing() - - def connection_lost(self, exc: Optional[BaseException]) -> None: - self._drop_timeout() - - if self._payload_parser is not None: - with suppress(Exception): - self._payload_parser.feed_eof() - - uncompleted = None - if self._parser is not None: - try: - uncompleted = self._parser.feed_eof() - except Exception: - if self._payload is not None: - self._payload.set_exception( - ClientPayloadError("Response payload is not completed") - ) - - if not self.is_eof(): - if isinstance(exc, OSError): - exc = ClientOSError(*exc.args) - if exc is None: - exc = ServerDisconnectedError(uncompleted) - # assigns self._should_close to True as side effect, - # we do it anyway below - self.set_exception(exc) - - self._should_close = True - self._parser = None - self._payload = None - self._payload_parser = None - self._reading_paused = False - - super().connection_lost(exc) - - def eof_received(self) -> None: - # should call parser.feed_eof() most likely - self._drop_timeout() - - def pause_reading(self) -> None: - super().pause_reading() - self._drop_timeout() - - def resume_reading(self) -> None: - super().resume_reading() - self._reschedule_timeout() - - def set_exception(self, exc: BaseException) -> None: - self._should_close = True - self._drop_timeout() - super().set_exception(exc) - - def set_parser(self, parser: Any, payload: Any) -> None: - # TODO: actual types are: - # parser: WebSocketReader - # payload: FlowControlDataQueue - # but they are not generi enough - # Need an ABC for both types - self._payload = payload - self._payload_parser = parser - - self._drop_timeout() - - if self._tail: - data, self._tail = self._tail, b"" - self.data_received(data) - - def set_response_params( - self, - *, - timer: Optional[BaseTimerContext] = None, - skip_payload: bool = False, - read_until_eof: bool = False, - auto_decompress: bool = True, - read_timeout: Optional[float] = None, - read_bufsize: int = 2 ** 16 - ) -> None: - self._skip_payload = skip_payload - - self._read_timeout = read_timeout - self._reschedule_timeout() - - self._parser 
= HttpResponseParser( - self, - self._loop, - read_bufsize, - timer=timer, - payload_exception=ClientPayloadError, - response_with_body=not skip_payload, - read_until_eof=read_until_eof, - auto_decompress=auto_decompress, - ) - - if self._tail: - data, self._tail = self._tail, b"" - self.data_received(data) - - def _drop_timeout(self) -> None: - if self._read_timeout_handle is not None: - self._read_timeout_handle.cancel() - self._read_timeout_handle = None - - def _reschedule_timeout(self) -> None: - timeout = self._read_timeout - if self._read_timeout_handle is not None: - self._read_timeout_handle.cancel() - - if timeout: - self._read_timeout_handle = self._loop.call_later( - timeout, self._on_read_timeout - ) - else: - self._read_timeout_handle = None - - def _on_read_timeout(self) -> None: - exc = ServerTimeoutError("Timeout on reading data from socket") - self.set_exception(exc) - if self._payload is not None: - self._payload.set_exception(exc) - - def data_received(self, data: bytes) -> None: - self._reschedule_timeout() - - if not data: - return - - # custom payload parser - if self._payload_parser is not None: - eof, tail = self._payload_parser.feed_data(data) - if eof: - self._payload = None - self._payload_parser = None - - if tail: - self.data_received(tail) - return - else: - if self._upgraded or self._parser is None: - # i.e. websocket connection, websocket parser is not set yet - self._tail += data - else: - # parse http messages - try: - messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as exc: - if self.transport is not None: - # connection.release() could be called BEFORE - # data_received(), the transport is already - # closed in this case - self.transport.close() - # should_close is True after the call - self.set_exception(exc) - return - - self._upgraded = upgraded - - payload = None - for message, payload in messages: - if message.should_close: - self._should_close = True - - self._payload = payload - - if self._skip_payload or message.code in (204, 304): - self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore - else: - self.feed_data((message, payload), 0) - if payload is not None: - # new message(s) was processed - # register timeout handler unsubscribing - # either on end-of-stream or immediately for - # EMPTY_PAYLOAD - if payload is not EMPTY_PAYLOAD: - payload.on_eof(self._drop_timeout) - else: - self._drop_timeout() - - if tail: - if upgraded: - self.data_received(tail) - else: - self._tail = tail diff --git a/third_party/python/aiohttp/aiohttp/client_reqrep.py b/third_party/python/aiohttp/aiohttp/client_reqrep.py deleted file mode 100644 index d826bfeb7e50..000000000000 --- a/third_party/python/aiohttp/aiohttp/client_reqrep.py +++ /dev/null @@ -1,1127 +0,0 @@ -import asyncio -import codecs -import functools -import io -import re -import sys -import traceback -import warnings -from hashlib import md5, sha1, sha256 -from http.cookies import CookieError, Morsel, SimpleCookie -from types import MappingProxyType, TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterable, - List, - Mapping, - Optional, - Tuple, - Type, - Union, - cast, -) - -import attr -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL - -from . 
import hdrs, helpers, http, multipart, payload -from .abc import AbstractStreamWriter -from .client_exceptions import ( - ClientConnectionError, - ClientOSError, - ClientResponseError, - ContentTypeError, - InvalidURL, - ServerFingerprintMismatch, -) -from .formdata import FormData -from .helpers import ( - PY_36, - BaseTimerContext, - BasicAuth, - HeadersMixin, - TimerNoop, - noop, - reify, - set_result, -) -from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter -from .log import client_logger -from .streams import StreamReader -from .typedefs import ( - DEFAULT_JSON_DECODER, - JSONDecoder, - LooseCookies, - LooseHeaders, - RawHeaders, -) - -try: - import ssl - from ssl import SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore - SSLContext = object # type: ignore - -try: - import cchardet as chardet -except ImportError: # pragma: no cover - import chardet # type: ignore - - -__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") - - -if TYPE_CHECKING: # pragma: no cover - from .client import ClientSession - from .connector import Connection - from .tracing import Trace - - -json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ContentDisposition: - type: Optional[str] - parameters: "MappingProxyType[str, str]" - filename: Optional[str] - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class RequestInfo: - url: URL - method: str - headers: "CIMultiDictProxy[str]" - real_url: URL = attr.ib() - - @real_url.default - def real_url_default(self) -> URL: - return self.url - - -class Fingerprint: - HASHFUNC_BY_DIGESTLEN = { - 16: md5, - 20: sha1, - 32: sha256, - } - - def __init__(self, fingerprint: bytes) -> None: - digestlen = len(fingerprint) - hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) - if not hashfunc: - raise ValueError("fingerprint has invalid length") - elif hashfunc is md5 or hashfunc is sha1: - raise ValueError( - "md5 and sha1 are insecure and " "not supported. Use sha256." 
- ) - self._hashfunc = hashfunc - self._fingerprint = fingerprint - - @property - def fingerprint(self) -> bytes: - return self._fingerprint - - def check(self, transport: asyncio.Transport) -> None: - if not transport.get_extra_info("sslcontext"): - return - sslobj = transport.get_extra_info("ssl_object") - cert = sslobj.getpeercert(binary_form=True) - got = self._hashfunc(cert).digest() - if got != self._fingerprint: - host, port, *_ = transport.get_extra_info("peername") - raise ServerFingerprintMismatch(self._fingerprint, got, host, port) - - -if ssl is not None: - SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) -else: # pragma: no cover - SSL_ALLOWED_TYPES = type(None) - - -def _merge_ssl_params( - ssl: Union["SSLContext", bool, Fingerprint, None], - verify_ssl: Optional[bool], - ssl_context: Optional["SSLContext"], - fingerprint: Optional[bytes], -) -> Union["SSLContext", bool, Fingerprint, None]: - if verify_ssl is not None and not verify_ssl: - warnings.warn( - "verify_ssl is deprecated, use ssl=False instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not None: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = False - if ssl_context is not None: - warnings.warn( - "ssl_context is deprecated, use ssl=context instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not None: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = ssl_context - if fingerprint is not None: - warnings.warn( - "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not None: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = Fingerprint(fingerprint) - if not isinstance(ssl, SSL_ALLOWED_TYPES): - raise TypeError( - "ssl should be SSLContext, bool, Fingerprint or None, " - "got {!r} instead.".format(ssl) - ) - return ssl - - -@attr.s(auto_attribs=True, slots=True, frozen=True) -class ConnectionKey: - # the key should contain an information about used proxy / TLS - # to prevent reusing wrong connections from a pool - host: str - port: Optional[int] - is_ssl: bool - ssl: Union[SSLContext, None, bool, Fingerprint] - proxy: Optional[URL] - proxy_auth: Optional[BasicAuth] - proxy_headers_hash: Optional[int] # hash(CIMultiDict) - - -def _is_expected_content_type( - response_content_type: str, expected_content_type: str -) -> bool: - if expected_content_type == "application/json": - return json_re.match(response_content_type) is not None - return expected_content_type in response_content_type - - -class ClientRequest: - GET_METHODS = { - hdrs.METH_GET, - hdrs.METH_HEAD, - hdrs.METH_OPTIONS, - hdrs.METH_TRACE, - } - POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} - ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - - DEFAULT_HEADERS = { - hdrs.ACCEPT: "*/*", - hdrs.ACCEPT_ENCODING: "gzip, deflate", - } - - body = b"" - auth = None - response = None - - _writer = None # async task for streaming data - _continue = None # waiter future for '100 Continue' response - - # N.B. - # Adding __del__ method with self._writer closing doesn't make sense - # because _writer is instance method, thus it keeps a reference to self. - # Until writer has finished finalizer will not be called. 
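_merge_ssl_params above collapses the deprecated verify_ssl, ssl_context and fingerprint keywords into the single ssl argument (an SSLContext, bool, Fingerprint, or None). A hedged usage sketch of that argument; the URL and the fetch coroutine are illustrative and not part of this patch::

    import asyncio
    import ssl

    import aiohttp

    async def fetch(url: str) -> bytes:
        ctx = ssl.create_default_context()  # ssl=<SSLContext>: custom TLS settings
        async with aiohttp.ClientSession() as session:
            # Older spellings that _merge_ssl_params maps onto ``ssl``:
            #   verify_ssl=False   -> ssl=False
            #   ssl_context=ctx    -> ssl=ctx
            #   fingerprint=digest -> ssl=aiohttp.Fingerprint(digest)  (SHA-256 digests only)
            async with session.get(url, ssl=ctx) as resp:
                return await resp.read()

    asyncio.run(fetch("https://example.org"))  # illustrative URL

Passing more than one of the deprecated keywords together with ssl raises ValueError, as enforced in _merge_ssl_params above.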
- - def __init__( - self, - method: str, - url: URL, - *, - params: Optional[Mapping[str, str]] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Iterable[str] = frozenset(), - data: Any = None, - cookies: Optional[LooseCookies] = None, - auth: Optional[BasicAuth] = None, - version: http.HttpVersion = http.HttpVersion11, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - response_class: Optional[Type["ClientResponse"]] = None, - proxy: Optional[URL] = None, - proxy_auth: Optional[BasicAuth] = None, - timer: Optional[BaseTimerContext] = None, - session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, bool, Fingerprint, None] = None, - proxy_headers: Optional[LooseHeaders] = None, - traces: Optional[List["Trace"]] = None, - ): - - if loop is None: - loop = asyncio.get_event_loop() - - assert isinstance(url, URL), url - assert isinstance(proxy, (URL, type(None))), proxy - # FIXME: session is None in tests only, need to fix tests - # assert session is not None - self._session = cast("ClientSession", session) - if params: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) - self.original_url = url - self.url = url.with_fragment(None) - self.method = method.upper() - self.chunked = chunked - self.compress = compress - self.loop = loop - self.length = None - if response_class is None: - real_response_class = ClientResponse - else: - real_response_class = response_class - self.response_class = real_response_class # type: Type[ClientResponse] - self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - self.update_version(version) - self.update_host(url) - self.update_headers(headers) - self.update_auto_headers(skip_auto_headers) - self.update_cookies(cookies) - self.update_content_encoding(data) - self.update_auth(auth) - self.update_proxy(proxy, proxy_auth, proxy_headers) - - self.update_body_from_data(data) - if data or self.method not in self.GET_METHODS: - self.update_transfer_encoding() - self.update_expect_continue(expect100) - if traces is None: - traces = [] - self._traces = traces - - def is_ssl(self) -> bool: - return self.url.scheme in ("https", "wss") - - @property - def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]: - return self._ssl - - @property - def connection_key(self) -> ConnectionKey: - proxy_headers = self.proxy_headers - if proxy_headers: - h = hash( - tuple((k, v) for k, v in proxy_headers.items()) - ) # type: Optional[int] - else: - h = None - return ConnectionKey( - self.host, - self.port, - self.is_ssl(), - self.ssl, - self.proxy, - self.proxy_auth, - h, - ) - - @property - def host(self) -> str: - ret = self.url.raw_host - assert ret is not None - return ret - - @property - def port(self) -> Optional[int]: - return self.url.port - - @property - def request_info(self) -> RequestInfo: - headers = CIMultiDictProxy(self.headers) # type: CIMultiDictProxy[str] - return RequestInfo(self.url, self.method, headers, self.original_url) - - def update_host(self, url: URL) -> None: - """Update destination host, port and connection type (ssl).""" - # get host/port - if not url.raw_host: - raise InvalidURL(url) - - # basic auth info - username, password = url.user, url.password - if username: - self.auth = helpers.BasicAuth(username, password or "") - - def 
update_version(self, version: Union[http.HttpVersion, str]) -> None: - """Convert request version to two elements tuple. - - parser HTTP version '1.1' => (1, 1) - """ - if isinstance(version, str): - v = [part.strip() for part in version.split(".", 1)] - try: - version = http.HttpVersion(int(v[0]), int(v[1])) - except ValueError: - raise ValueError( - f"Can not parse http version number: {version}" - ) from None - self.version = version - - def update_headers(self, headers: Optional[LooseHeaders]) -> None: - """Update request headers.""" - self.headers = CIMultiDict() # type: CIMultiDict[str] - - # add host - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" - if self.url.port is not None and not self.url.is_default_port(): - netloc += ":" + str(self.url.port) - self.headers[hdrs.HOST] = netloc - - if headers: - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() # type: ignore - - for key, value in headers: # type: ignore - # A special case for Host header - if key.lower() == "host": - self.headers[key] = value - else: - self.headers.add(key, value) - - def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: - self.skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore - - for hdr, val in self.DEFAULT_HEADERS.items(): - if hdr not in used_headers: - self.headers.add(hdr, val) - - if hdrs.USER_AGENT not in used_headers: - self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE - - def update_cookies(self, cookies: Optional[LooseCookies]) -> None: - """Update request cookies header.""" - if not cookies: - return - - c = SimpleCookie() # type: SimpleCookie[str] - if hdrs.COOKIE in self.headers: - c.load(self.headers.get(hdrs.COOKIE, "")) - del self.headers[hdrs.COOKIE] - - if isinstance(cookies, Mapping): - iter_cookies = cookies.items() - else: - iter_cookies = cookies # type: ignore - for name, value in iter_cookies: - if isinstance(value, Morsel): - # Preserve coded_value - mrsl_val = value.get(value.key, Morsel()) - mrsl_val.set(value.key, value.value, value.coded_value) - c[name] = mrsl_val - else: - c[name] = value # type: ignore - - self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() - - def update_content_encoding(self, data: Any) -> None: - """Set request content encoding.""" - if not data: - return - - enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() - if enc: - if self.compress: - raise ValueError( - "compress can not be set " "if Content-Encoding header is set" - ) - elif self.compress: - if not isinstance(self.compress, str): - self.compress = "deflate" - self.headers[hdrs.CONTENT_ENCODING] = self.compress - self.chunked = True # enable chunked, no need to deal with length - - def update_transfer_encoding(self) -> None: - """Analyze transfer-encoding header.""" - te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() - - if "chunked" in te: - if self.chunked: - raise ValueError( - "chunked can not be set " - 'if "Transfer-Encoding: chunked" header is set' - ) - - elif self.chunked: - if hdrs.CONTENT_LENGTH in self.headers: - raise ValueError( - "chunked can not be set " "if Content-Length header is set" - ) - - self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) - - def update_auth(self, auth: Optional[BasicAuth]) -> None: - 
"""Set basic auth.""" - if auth is None: - auth = self.auth - if auth is None: - return - - if not isinstance(auth, helpers.BasicAuth): - raise TypeError("BasicAuth() tuple is required instead") - - self.headers[hdrs.AUTHORIZATION] = auth.encode() - - def update_body_from_data(self, body: Any) -> None: - if not body: - return - - # FormData - if isinstance(body, FormData): - body = body() - - try: - body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) - except payload.LookupError: - body = FormData(body)() - - self.body = body - - # enable chunked encoding if needed - if not self.chunked: - if hdrs.CONTENT_LENGTH not in self.headers: - size = body.size - if size is None: - self.chunked = True - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(size) - - # copy payload headers - assert body.headers - for (key, value) in body.headers.items(): - if key in self.headers: - continue - if key in self.skip_auto_headers: - continue - self.headers[key] = value - - def update_expect_continue(self, expect: bool = False) -> None: - if expect: - self.headers[hdrs.EXPECT] = "100-continue" - elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": - expect = True - - if expect: - self._continue = self.loop.create_future() - - def update_proxy( - self, - proxy: Optional[URL], - proxy_auth: Optional[BasicAuth], - proxy_headers: Optional[LooseHeaders], - ) -> None: - if proxy and not proxy.scheme == "http": - raise ValueError("Only http proxies are supported") - if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): - raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy = proxy - self.proxy_auth = proxy_auth - self.proxy_headers = proxy_headers - - def keep_alive(self) -> bool: - if self.version < HttpVersion10: - # keep alive not supported at all - return False - if self.version == HttpVersion10: - if self.headers.get(hdrs.CONNECTION) == "keep-alive": - return True - else: # no headers means we close for Http 1.0 - return False - elif self.headers.get(hdrs.CONNECTION) == "close": - return False - - return True - - async def write_bytes( - self, writer: AbstractStreamWriter, conn: "Connection" - ) -> None: - """Support coroutines that yields bytes objects.""" - # 100 response - if self._continue is not None: - await writer.drain() - await self._continue - - protocol = conn.protocol - assert protocol is not None - try: - if isinstance(self.body, payload.Payload): - await self.body.write(writer) - else: - if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) # type: ignore - - for chunk in self.body: - await writer.write(chunk) # type: ignore - - await writer.write_eof() - except OSError as exc: - new_exc = ClientOSError( - exc.errno, "Can not write request body for %s" % self.url - ) - new_exc.__context__ = exc - new_exc.__cause__ = exc - protocol.set_exception(new_exc) - except asyncio.CancelledError as exc: - if not conn.closed: - protocol.set_exception(exc) - except Exception as exc: - protocol.set_exception(exc) - finally: - self._writer = None - - async def send(self, conn: "Connection") -> "ClientResponse": - # Specify request target: - # - CONNECT request must send authority form URI - # - not CONNECT proxy must send absolute form URI - # - most common is origin form URI - if self.method == hdrs.METH_CONNECT: - connect_host = self.url.raw_host - assert connect_host is not None - if helpers.is_ipv6_address(connect_host): - connect_host = f"[{connect_host}]" - path = 
f"{connect_host}:{self.url.port}" - elif self.proxy and not self.is_ssl(): - path = str(self.url) - else: - path = self.url.raw_path - if self.url.raw_query_string: - path += "?" + self.url.raw_query_string - - protocol = conn.protocol - assert protocol is not None - writer = StreamWriter( - protocol, - self.loop, - on_chunk_sent=functools.partial( - self._on_chunk_request_sent, self.method, self.url - ), - ) - - if self.compress: - writer.enable_compression(self.compress) - - if self.chunked is not None: - writer.enable_chunking() - - # set default content-type - if ( - self.method in self.POST_METHODS - and hdrs.CONTENT_TYPE not in self.skip_auto_headers - and hdrs.CONTENT_TYPE not in self.headers - ): - self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - - # set the connection header - connection = self.headers.get(hdrs.CONNECTION) - if not connection: - if self.keep_alive(): - if self.version == HttpVersion10: - connection = "keep-alive" - else: - if self.version == HttpVersion11: - connection = "close" - - if connection is not None: - self.headers[hdrs.CONNECTION] = connection - - # status + headers - status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format( - self.method, path, self.version - ) - await writer.write_headers(status_line, self.headers) - - self._writer = self.loop.create_task(self.write_bytes(writer, conn)) - - response_class = self.response_class - assert response_class is not None - self.response = response_class( - self.method, - self.original_url, - writer=self._writer, - continue100=self._continue, - timer=self._timer, - request_info=self.request_info, - traces=self._traces, - loop=self.loop, - session=self._session, - ) - return self.response - - async def close(self) -> None: - if self._writer is not None: - try: - await self._writer - finally: - self._writer = None - - def terminate(self) -> None: - if self._writer is not None: - if not self.loop.is_closed(): - self._writer.cancel() - self._writer = None - - async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: - for trace in self._traces: - await trace.send_request_chunk_sent(method, url, chunk) - - -class ClientResponse(HeadersMixin): - - # from the Status-Line of the response - version = None # HTTP-Version - status = None # type: int # Status-Code - reason = None # Reason-Phrase - - content = None # type: StreamReader # Payload stream - _headers = None # type: CIMultiDictProxy[str] # Response headers - _raw_headers = None # type: RawHeaders # Response raw headers - - _connection = None # current connection - _source_traceback = None - # setted up by ClientRequest after ClientResponse object creation - # post-init stage allows to not change ctor signature - _closed = True # to allow __del__ for non-initialized properly response - _released = False - - def __init__( - self, - method: str, - url: URL, - *, - writer: "asyncio.Task[None]", - continue100: Optional["asyncio.Future[bool]"], - timer: BaseTimerContext, - request_info: RequestInfo, - traces: List["Trace"], - loop: asyncio.AbstractEventLoop, - session: "ClientSession", - ) -> None: - assert isinstance(url, URL) - - self.method = method - self.cookies = SimpleCookie() # type: SimpleCookie[str] - - self._real_url = url - self._url = url.with_fragment(None) - self._body = None # type: Any - self._writer = writer # type: Optional[asyncio.Task[None]] - self._continue = continue100 # None by default - self._closed = True - self._history = () # type: Tuple[ClientResponse, ...] 
- self._request_info = request_info - self._timer = timer if timer is not None else TimerNoop() - self._cache = {} # type: Dict[str, Any] - self._traces = traces - self._loop = loop - # store a reference to session #1985 - self._session = session # type: Optional[ClientSession] - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - @reify - def url(self) -> URL: - return self._url - - @reify - def url_obj(self) -> URL: - warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) - return self._url - - @reify - def real_url(self) -> URL: - return self._real_url - - @reify - def host(self) -> str: - assert self._url.host is not None - return self._url.host - - @reify - def headers(self) -> "CIMultiDictProxy[str]": - return self._headers - - @reify - def raw_headers(self) -> RawHeaders: - return self._raw_headers - - @reify - def request_info(self) -> RequestInfo: - return self._request_info - - @reify - def content_disposition(self) -> Optional[ContentDisposition]: - raw = self._headers.get(hdrs.CONTENT_DISPOSITION) - if raw is None: - return None - disposition_type, params_dct = multipart.parse_content_disposition(raw) - params = MappingProxyType(params_dct) - filename = multipart.content_disposition_filename(params) - return ContentDisposition(disposition_type, params, filename) - - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: - return - - if self._connection is not None: - self._connection.release() - self._cleanup_writer() - - if self._loop.get_debug(): - if PY_36: - kwargs = {"source": self} - else: - kwargs = {} - _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) - context = {"client_response": self, "message": "Unclosed response"} - if self._source_traceback: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __repr__(self) -> str: - out = io.StringIO() - ascii_encodable_url = str(self.url) - if self.reason: - ascii_encodable_reason = self.reason.encode( - "ascii", "backslashreplace" - ).decode("ascii") - else: - ascii_encodable_reason = self.reason - print( - "".format( - ascii_encodable_url, self.status, ascii_encodable_reason - ), - file=out, - ) - print(self.headers, file=out) - return out.getvalue() - - @property - def connection(self) -> Optional["Connection"]: - return self._connection - - @reify - def history(self) -> Tuple["ClientResponse", ...]: - """A sequence of of responses, if redirects occurred.""" - return self._history - - @reify - def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": - links_str = ", ".join(self.headers.getall("link", [])) - - if not links_str: - return MultiDictProxy(MultiDict()) - - links = MultiDict() # type: MultiDict[MultiDictProxy[Union[str, URL]]] - - for val in re.split(r",(?=\s*<)", links_str): - match = re.match(r"\s*<(.*)>(.*)", val) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - url, params_str = match.groups() - params = params_str.split(";")[1:] - - link = MultiDict() # type: MultiDict[Union[str, URL]] - - for param in params: - match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - key, _, value, _ = match.groups() - - link.add(key, value) - - key = link.get("rel", url) # type: ignore - - link.add("url", self.url.join(URL(url))) - - links.add(key, MultiDictProxy(link)) - - return 
MultiDictProxy(links) - - async def start(self, connection: "Connection") -> "ClientResponse": - """Start response processing.""" - self._closed = False - self._protocol = connection.protocol - self._connection = connection - - with self._timer: - while True: - # read response - try: - message, payload = await self._protocol.read() # type: ignore - except http.HttpProcessingError as exc: - raise ClientResponseError( - self.request_info, - self.history, - status=exc.code, - message=exc.message, - headers=exc.headers, - ) from exc - - if message.code < 100 or message.code > 199 or message.code == 101: - break - - if self._continue is not None: - set_result(self._continue, True) - self._continue = None - - # payload eof handler - payload.on_eof(self._response_eof) - - # response status - self.version = message.version - self.status = message.code - self.reason = message.reason - - # headers - self._headers = message.headers # type is CIMultiDictProxy - self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] - - # payload - self.content = payload - - # cookies - for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): - try: - self.cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) - return self - - def _response_eof(self) -> None: - if self._closed: - return - - if self._connection is not None: - # websocket, protocol could be None because - # connection could be detached - if ( - self._connection.protocol is not None - and self._connection.protocol.upgraded - ): - return - - self._connection.release() - self._connection = None - - self._closed = True - self._cleanup_writer() - - @property - def closed(self) -> bool: - return self._closed - - def close(self) -> None: - if not self._released: - self._notify_content() - if self._closed: - return - - self._closed = True - if self._loop is None or self._loop.is_closed(): - return - - if self._connection is not None: - self._connection.close() - self._connection = None - self._cleanup_writer() - - def release(self) -> Any: - if not self._released: - self._notify_content() - if self._closed: - return noop() - - self._closed = True - if self._connection is not None: - self._connection.release() - self._connection = None - - self._cleanup_writer() - return noop() - - @property - def ok(self) -> bool: - """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. - - This is **not** a check for ``200 OK`` but a check that the response - status is under 400. 
- """ - try: - self.raise_for_status() - except ClientResponseError: - return False - return True - - def raise_for_status(self) -> None: - if 400 <= self.status: - # reason should always be not None for a started response - assert self.reason is not None - self.release() - raise ClientResponseError( - self.request_info, - self.history, - status=self.status, - message=self.reason, - headers=self.headers, - ) - - def _cleanup_writer(self) -> None: - if self._writer is not None: - self._writer.cancel() - self._writer = None - self._session = None - - def _notify_content(self) -> None: - content = self.content - if content and content.exception() is None: - content.set_exception(ClientConnectionError("Connection closed")) - self._released = True - - async def wait_for_close(self) -> None: - if self._writer is not None: - try: - await self._writer - finally: - self._writer = None - self.release() - - async def read(self) -> bytes: - """Read response payload.""" - if self._body is None: - try: - self._body = await self.content.read() - for trace in self._traces: - await trace.send_response_chunk_received( - self.method, self.url, self._body - ) - except BaseException: - self.close() - raise - elif self._released: - raise ClientConnectionError("Connection closed") - - return self._body - - def get_encoding(self) -> str: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - mimetype = helpers.parse_mimetype(ctype) - - encoding = mimetype.parameters.get("charset") - if encoding: - try: - codecs.lookup(encoding) - except LookupError: - encoding = None - if not encoding: - if mimetype.type == "application" and ( - mimetype.subtype == "json" or mimetype.subtype == "rdap" - ): - # RFC 7159 states that the default encoding is UTF-8. - # RFC 7483 defines application/rdap+json - encoding = "utf-8" - elif self._body is None: - raise RuntimeError( - "Cannot guess the encoding of " "a not yet read body" - ) - else: - encoding = chardet.detect(self._body)["encoding"] - if not encoding: - encoding = "utf-8" - - return encoding - - async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: - """Read response payload and decode.""" - if self._body is None: - await self.read() - - if encoding is None: - encoding = self.get_encoding() - - return self._body.decode(encoding, errors=errors) # type: ignore - - async def json( - self, - *, - encoding: Optional[str] = None, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - content_type: Optional[str] = "application/json", - ) -> Any: - """Read and decodes JSON response.""" - if self._body is None: - await self.read() - - if content_type: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - if not _is_expected_content_type(ctype, content_type): - raise ContentTypeError( - self.request_info, - self.history, - message=( - "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype - ), - headers=self.headers, - ) - - stripped = self._body.strip() # type: ignore - if not stripped: - return None - - if encoding is None: - encoding = self.get_encoding() - - return loads(stripped.decode(encoding)) - - async def __aenter__(self) -> "ClientResponse": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - # similar to _RequestContextManager, we do not need to check - # for exceptions, response object can close connection - # if state is broken - self.release() diff --git a/third_party/python/aiohttp/aiohttp/client_ws.py 
b/third_party/python/aiohttp/aiohttp/client_ws.py deleted file mode 100644 index 28fa371cce99..000000000000 --- a/third_party/python/aiohttp/aiohttp/client_ws.py +++ /dev/null @@ -1,301 +0,0 @@ -"""WebSocket client for asyncio.""" - -import asyncio -from typing import Any, Optional - -import async_timeout - -from .client_exceptions import ClientError -from .client_reqrep import ClientResponse -from .helpers import call_later, set_result -from .http import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WebSocketError, - WSMessage, - WSMsgType, -) -from .http_websocket import WebSocketWriter # WSMessage -from .streams import EofStream, FlowControlDataQueue -from .typedefs import ( - DEFAULT_JSON_DECODER, - DEFAULT_JSON_ENCODER, - JSONDecoder, - JSONEncoder, -) - - -class ClientWebSocketResponse: - def __init__( - self, - reader: "FlowControlDataQueue[WSMessage]", - writer: WebSocketWriter, - protocol: Optional[str], - response: ClientResponse, - timeout: float, - autoclose: bool, - autoping: bool, - loop: asyncio.AbstractEventLoop, - *, - receive_timeout: Optional[float] = None, - heartbeat: Optional[float] = None, - compress: int = 0, - client_notakeover: bool = False, - ) -> None: - self._response = response - self._conn = response.connection - - self._writer = writer - self._reader = reader - self._protocol = protocol - self._closed = False - self._closing = False - self._close_code = None # type: Optional[int] - self._timeout = timeout - self._receive_timeout = receive_timeout - self._autoclose = autoclose - self._autoping = autoping - self._heartbeat = heartbeat - self._heartbeat_cb = None - if heartbeat is not None: - self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb = None - self._loop = loop - self._waiting = None # type: Optional[asyncio.Future[bool]] - self._exception = None # type: Optional[BaseException] - self._compress = compress - self._client_notakeover = client_notakeover - - self._reset_heartbeat() - - def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - - if self._heartbeat_cb is not None: - self._heartbeat_cb.cancel() - self._heartbeat_cb = None - - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() - - if self._heartbeat is not None: - self._heartbeat_cb = call_later( - self._send_heartbeat, self._heartbeat, self._loop - ) - - def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. 
- self._loop.create_task(self._writer.ping()) - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, self._pong_heartbeat, self._loop - ) - - def _pong_not_received(self) -> None: - if not self._closed: - self._closed = True - self._close_code = 1006 - self._exception = asyncio.TimeoutError() - self._response.close() - - @property - def closed(self) -> bool: - return self._closed - - @property - def close_code(self) -> Optional[int]: - return self._close_code - - @property - def protocol(self) -> Optional[str]: - return self._protocol - - @property - def compress(self) -> int: - return self._compress - - @property - def client_notakeover(self) -> bool: - return self._client_notakeover - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """extra info from connection transport""" - conn = self._response.connection - if conn is None: - return default - transport = conn.transport - if transport is None: - return default - return transport.get_extra_info(name, default) - - def exception(self) -> Optional[BaseException]: - return self._exception - - async def ping(self, message: bytes = b"") -> None: - await self._writer.ping(message) - - async def pong(self, message: bytes = b"") -> None: - await self._writer.pong(message) - - async def send_str(self, data: str, compress: Optional[int] = None) -> None: - if not isinstance(data, str): - raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send(data, binary=False, compress=compress) - - async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: - if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send(data, binary=True, compress=compress) - - async def send_json( - self, - data: Any, - compress: Optional[int] = None, - *, - dumps: JSONEncoder = DEFAULT_JSON_ENCODER, - ) -> None: - await self.send_str(dumps(data), compress=compress) - - async def close(self, *, code: int = 1000, message: bytes = b"") -> bool: - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting is not None and not self._closed: - self._reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting - - if not self._closed: - self._cancel_heartbeat() - self._closed = True - try: - await self._writer.close(code, message) - except asyncio.CancelledError: - self._close_code = 1006 - self._response.close() - raise - except Exception as exc: - self._close_code = 1006 - self._exception = exc - self._response.close() - return True - - if self._closing: - self._response.close() - return True - - while True: - try: - with async_timeout.timeout(self._timeout, loop=self._loop): - msg = await self._reader.read() - except asyncio.CancelledError: - self._close_code = 1006 - self._response.close() - raise - except Exception as exc: - self._close_code = 1006 - self._exception = exc - self._response.close() - return True - - if msg.type == WSMsgType.CLOSE: - self._close_code = msg.data - self._response.close() - return True - else: - return False - - async def receive(self, timeout: Optional[float] = None) -> WSMessage: - while True: - if self._waiting is not None: - raise RuntimeError("Concurrent call to receive() is not allowed") - - if self._closed: - return WS_CLOSED_MESSAGE - elif self._closing: - await self.close() - return WS_CLOSED_MESSAGE - - try: - self._waiting = 
self._loop.create_future() - try: - with async_timeout.timeout( - timeout or self._receive_timeout, loop=self._loop - ): - msg = await self._reader.read() - self._reset_heartbeat() - finally: - waiter = self._waiting - self._waiting = None - set_result(waiter, True) - except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = 1006 - raise - except EofStream: - self._close_code = 1000 - await self.close() - return WSMessage(WSMsgType.CLOSED, None, None) - except ClientError: - self._closed = True - self._close_code = 1006 - return WS_CLOSED_MESSAGE - except WebSocketError as exc: - self._close_code = exc.code - await self.close(code=exc.code) - return WSMessage(WSMsgType.ERROR, exc, None) - except Exception as exc: - self._exception = exc - self._closing = True - self._close_code = 1006 - await self.close() - return WSMessage(WSMsgType.ERROR, exc, None) - - if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data - if not self._closed and self._autoclose: - await self.close() - elif msg.type == WSMsgType.CLOSING: - self._closing = True - elif msg.type == WSMsgType.PING and self._autoping: - await self.pong(msg.data) - continue - elif msg.type == WSMsgType.PONG and self._autoping: - continue - - return msg - - async def receive_str(self, *, timeout: Optional[float] = None) -> str: - msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") - return msg.data - - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: - msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") - return msg.data - - async def receive_json( - self, - *, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - timeout: Optional[float] = None, - ) -> Any: - data = await self.receive_str(timeout=timeout) - return loads(data) - - def __aiter__(self) -> "ClientWebSocketResponse": - return self - - async def __anext__(self) -> WSMessage: - msg = await self.receive() - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): - raise StopAsyncIteration - return msg diff --git a/third_party/python/aiohttp/aiohttp/connector.py b/third_party/python/aiohttp/aiohttp/connector.py deleted file mode 100644 index 748b22a4228d..000000000000 --- a/third_party/python/aiohttp/aiohttp/connector.py +++ /dev/null @@ -1,1262 +0,0 @@ -import asyncio -import functools -import random -import sys -import traceback -import warnings -from collections import defaultdict, deque -from contextlib import suppress -from http.cookies import SimpleCookie -from itertools import cycle, islice -from time import monotonic -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - DefaultDict, - Dict, - Iterator, - List, - Optional, - Set, - Tuple, - Type, - Union, - cast, -) - -import attr - -from . 
import hdrs, helpers -from .abc import AbstractResolver -from .client_exceptions import ( - ClientConnectionError, - ClientConnectorCertificateError, - ClientConnectorError, - ClientConnectorSSLError, - ClientHttpProxyError, - ClientProxyConnectionError, - ServerFingerprintMismatch, - cert_errors, - ssl_errors, -) -from .client_proto import ResponseHandler -from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import PY_36, CeilTimeout, get_running_loop, is_ip_address, noop, sentinel -from .http import RESPONSES -from .locks import EventResultOrError -from .resolver import DefaultResolver - -try: - import ssl - - SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore - SSLContext = object # type: ignore - - -__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") - - -if TYPE_CHECKING: # pragma: no cover - from .client import ClientTimeout - from .client_reqrep import ConnectionKey - from .tracing import Trace - - -class _DeprecationWaiter: - __slots__ = ("_awaitable", "_awaited") - - def __init__(self, awaitable: Awaitable[Any]) -> None: - self._awaitable = awaitable - self._awaited = False - - def __await__(self) -> Any: - self._awaited = True - return self._awaitable.__await__() - - def __del__(self) -> None: - if not self._awaited: - warnings.warn( - "Connector.close() is a coroutine, " - "please use await connector.close()", - DeprecationWarning, - ) - - -class Connection: - - _source_traceback = None - _transport = None - - def __init__( - self, - connector: "BaseConnector", - key: "ConnectionKey", - protocol: ResponseHandler, - loop: asyncio.AbstractEventLoop, - ) -> None: - self._key = key - self._connector = connector - self._loop = loop - self._protocol = protocol # type: Optional[ResponseHandler] - self._callbacks = [] # type: List[Callable[[], None]] - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - def __repr__(self) -> str: - return f"Connection<{self._key}>" - - def __del__(self, _warnings: Any = warnings) -> None: - if self._protocol is not None: - if PY_36: - kwargs = {"source": self} - else: - kwargs = {} - _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) - if self._loop.is_closed(): - return - - self._connector._release(self._key, self._protocol, should_close=True) - - context = {"client_connection": self, "message": "Unclosed connection"} - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - @property - def loop(self) -> asyncio.AbstractEventLoop: - warnings.warn( - "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - @property - def transport(self) -> Optional[asyncio.Transport]: - if self._protocol is None: - return None - return self._protocol.transport - - @property - def protocol(self) -> Optional[ResponseHandler]: - return self._protocol - - def add_callback(self, callback: Callable[[], None]) -> None: - if callback is not None: - self._callbacks.append(callback) - - def _notify_release(self) -> None: - callbacks, self._callbacks = self._callbacks[:], [] - - for cb in callbacks: - with suppress(Exception): - cb() - - def close(self) -> None: - self._notify_release() - - if self._protocol is not None: - self._connector._release(self._key, self._protocol, should_close=True) - self._protocol = None - - def release(self) -> None: - self._notify_release() - - 
if self._protocol is not None: - self._connector._release( - self._key, self._protocol, should_close=self._protocol.should_close - ) - self._protocol = None - - @property - def closed(self) -> bool: - return self._protocol is None or not self._protocol.is_connected() - - -class _TransportPlaceholder: - """ placeholder for BaseConnector.connect function """ - - def close(self) -> None: - pass - - -class BaseConnector: - """Base connector class. - - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - enable_cleanup_closed - Enables clean-up closed ssl transports. - Disabled by default. - loop - Optional event loop. - """ - - _closed = True # prevent AttributeError in __del__ if ctor was failed - _source_traceback = None - - # abort transport after 2 seconds (cleanup broken connections) - _cleanup_closed_period = 2.0 - - def __init__( - self, - *, - keepalive_timeout: Union[object, None, float] = sentinel, - force_close: bool = False, - limit: int = 100, - limit_per_host: int = 0, - enable_cleanup_closed: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - - if force_close: - if keepalive_timeout is not None and keepalive_timeout is not sentinel: - raise ValueError( - "keepalive_timeout cannot " "be set if force_close is True" - ) - else: - if keepalive_timeout is sentinel: - keepalive_timeout = 15.0 - - loop = get_running_loop(loop) - - self._closed = False - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - self._conns = ( - {} - ) # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] - self._limit = limit - self._limit_per_host = limit_per_host - self._acquired = set() # type: Set[ResponseHandler] - self._acquired_per_host = defaultdict( - set - ) # type: DefaultDict[ConnectionKey, Set[ResponseHandler]] - self._keepalive_timeout = cast(float, keepalive_timeout) - self._force_close = force_close - - # {host_key: FIFO list of waiters} - self._waiters = defaultdict(deque) # type: ignore - - self._loop = loop - self._factory = functools.partial(ResponseHandler, loop=loop) - - self.cookies = SimpleCookie() # type: SimpleCookie[str] - - # start keep-alive connection cleanup task - self._cleanup_handle = None - - # start cleanup closed transports task - self._cleanup_closed_handle = None - self._cleanup_closed_disabled = not enable_cleanup_closed - self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]] - self._cleanup_closed() - - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: - return - if not self._conns: - return - - conns = [repr(c) for c in self._conns.values()] - - self._close() - - if PY_36: - kwargs = {"source": self} - else: - kwargs = {} - _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) - context = { - "connector": self, - "connections": conns, - "message": "Unclosed connector", - } - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __enter__(self) -> "BaseConnector": - warnings.warn( - '"witn Connector():" is deprecated, ' - 'use "async with Connector():" instead', - DeprecationWarning, - ) - return self - - def __exit__(self, *exc: Any) -> None: - self.close() - - async def __aenter__(self) -> 
"BaseConnector": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - exc_traceback: Optional[TracebackType] = None, - ) -> None: - await self.close() - - @property - def force_close(self) -> bool: - """Ultimately close connection on releasing if True.""" - return self._force_close - - @property - def limit(self) -> int: - """The total number for simultaneous connections. - - If limit is 0 the connector has no limit. - The default limit size is 100. - """ - return self._limit - - @property - def limit_per_host(self) -> int: - """The limit_per_host for simultaneous connections - to the same endpoint. - - Endpoints are the same if they are have equal - (host, port, is_ssl) triple. - - """ - return self._limit_per_host - - def _cleanup(self) -> None: - """Cleanup unused transports.""" - if self._cleanup_handle: - self._cleanup_handle.cancel() - # _cleanup_handle should be unset, otherwise _release() will not - # recreate it ever! - self._cleanup_handle = None - - now = self._loop.time() - timeout = self._keepalive_timeout - - if self._conns: - connections = {} - deadline = now - timeout - for key, conns in self._conns.items(): - alive = [] - for proto, use_time in conns: - if proto.is_connected(): - if use_time - deadline < 0: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - alive.append((proto, use_time)) - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - - if alive: - connections[key] = alive - - self._conns = connections - - if self._conns: - self._cleanup_handle = helpers.weakref_handle( - self, "_cleanup", timeout, self._loop - ) - - def _drop_acquired_per_host( - self, key: "ConnectionKey", val: ResponseHandler - ) -> None: - acquired_per_host = self._acquired_per_host - if key not in acquired_per_host: - return - conns = acquired_per_host[key] - conns.remove(val) - if not conns: - del self._acquired_per_host[key] - - def _cleanup_closed(self) -> None: - """Double confirmation for transport close. - Some broken ssl servers may leave socket open without proper close. 
- """ - if self._cleanup_closed_handle: - self._cleanup_closed_handle.cancel() - - for transport in self._cleanup_closed_transports: - if transport is not None: - transport.abort() - - self._cleanup_closed_transports = [] - - if not self._cleanup_closed_disabled: - self._cleanup_closed_handle = helpers.weakref_handle( - self, "_cleanup_closed", self._cleanup_closed_period, self._loop - ) - - def close(self) -> Awaitable[None]: - """Close all opened transports.""" - self._close() - return _DeprecationWaiter(noop()) - - def _close(self) -> None: - if self._closed: - return - - self._closed = True - - try: - if self._loop.is_closed(): - return - - # cancel cleanup task - if self._cleanup_handle: - self._cleanup_handle.cancel() - - # cancel cleanup close task - if self._cleanup_closed_handle: - self._cleanup_closed_handle.cancel() - - for data in self._conns.values(): - for proto, t0 in data: - proto.close() - - for proto in self._acquired: - proto.close() - - for transport in self._cleanup_closed_transports: - if transport is not None: - transport.abort() - - finally: - self._conns.clear() - self._acquired.clear() - self._waiters.clear() - self._cleanup_handle = None - self._cleanup_closed_transports.clear() - self._cleanup_closed_handle = None - - @property - def closed(self) -> bool: - """Is connector closed. - - A readonly property. - """ - return self._closed - - def _available_connections(self, key: "ConnectionKey") -> int: - """ - Return number of available connections taking into account - the limit, limit_per_host and the connection key. - - If it returns less than 1 means that there is no connections - availables. - """ - - if self._limit: - # total calc available connections - available = self._limit - len(self._acquired) - - # check limit per host - if ( - self._limit_per_host - and available > 0 - and key in self._acquired_per_host - ): - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - - elif self._limit_per_host and key in self._acquired_per_host: - # check limit per host - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - else: - available = 1 - - return available - - async def connect( - self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" - ) -> Connection: - """Get from pool or create new connection.""" - key = req.connection_key - available = self._available_connections(key) - - # Wait if there are no available connections or if there are/were - # waiters (i.e. don't steal connection from a waiter about to wake up) - if available <= 0 or key in self._waiters: - fut = self._loop.create_future() - - # This connection will now count towards the limit. 
- self._waiters[key].append(fut) - - if traces: - for trace in traces: - await trace.send_connection_queued_start() - - try: - await fut - except BaseException as e: - if key in self._waiters: - # remove a waiter even if it was cancelled, normally it's - # removed when it's notified - try: - self._waiters[key].remove(fut) - except ValueError: # fut may no longer be in list - pass - - raise e - finally: - if key in self._waiters and not self._waiters[key]: - del self._waiters[key] - - if traces: - for trace in traces: - await trace.send_connection_queued_end() - - proto = self._get(key) - if proto is None: - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - - if traces: - for trace in traces: - await trace.send_connection_create_start() - - try: - proto = await self._create_connection(req, traces, timeout) - if self._closed: - proto.close() - raise ClientConnectionError("Connector is closed.") - except BaseException: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - self._release_waiter() - raise - else: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - - if traces: - for trace in traces: - await trace.send_connection_create_end() - else: - if traces: - for trace in traces: - await trace.send_connection_reuseconn() - - self._acquired.add(proto) - self._acquired_per_host[key].add(proto) - return Connection(self, key, proto, self._loop) - - def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: - try: - conns = self._conns[key] - except KeyError: - return None - - t1 = self._loop.time() - while conns: - proto, t0 = conns.pop() - if proto.is_connected(): - if t1 - t0 > self._keepalive_timeout: - transport = proto.transport - proto.close() - # only for SSL transports - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - if not conns: - # The very last connection was reclaimed: drop the key - del self._conns[key] - return proto - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - - # No more connections: drop the key - del self._conns[key] - return None - - def _release_waiter(self) -> None: - """ - Iterates over all waiters till found one that is not finsihed and - belongs to a host that has available connections. - """ - if not self._waiters: - return - - # Having the dict keys ordered this avoids to iterate - # at the same order at each call. - queues = list(self._waiters.keys()) - random.shuffle(queues) - - for key in queues: - if self._available_connections(key) < 1: - continue - - waiters = self._waiters[key] - while waiters: - waiter = waiters.popleft() - if not waiter.done(): - waiter.set_result(None) - return - - def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: - if self._closed: - # acquired connection is already released on connector closing - return - - try: - self._acquired.remove(proto) - self._drop_acquired_per_host(key, proto) - except KeyError: # pragma: no cover - # this may be result of undetermenistic order of objects - # finalization due garbage collection. 
- pass - else: - self._release_waiter() - - def _release( - self, - key: "ConnectionKey", - protocol: ResponseHandler, - *, - should_close: bool = False, - ) -> None: - if self._closed: - # acquired connection is already released on connector closing - return - - self._release_acquired(key, protocol) - - if self._force_close: - should_close = True - - if should_close or protocol.should_close: - transport = protocol.transport - protocol.close() - - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - conns = self._conns.get(key) - if conns is None: - conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) - - if self._cleanup_handle is None: - self._cleanup_handle = helpers.weakref_handle( - self, "_cleanup", self._keepalive_timeout, self._loop - ) - - async def _create_connection( - self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - raise NotImplementedError() - - -class _DNSCacheTable: - def __init__(self, ttl: Optional[float] = None) -> None: - self._addrs_rr = ( - {} - ) # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] - self._timestamps = {} # type: Dict[Tuple[str, int], float] - self._ttl = ttl - - def __contains__(self, host: object) -> bool: - return host in self._addrs_rr - - def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: - self._addrs_rr[key] = (cycle(addrs), len(addrs)) - - if self._ttl: - self._timestamps[key] = monotonic() - - def remove(self, key: Tuple[str, int]) -> None: - self._addrs_rr.pop(key, None) - - if self._ttl: - self._timestamps.pop(key, None) - - def clear(self) -> None: - self._addrs_rr.clear() - self._timestamps.clear() - - def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: - loop, length = self._addrs_rr[key] - addrs = list(islice(loop, length)) - # Consume one more element to shift internal state of `cycle` - next(loop) - return addrs - - def expired(self, key: Tuple[str, int]) -> bool: - if self._ttl is None: - return False - - return self._timestamps[key] + self._ttl < monotonic() - - -class TCPConnector(BaseConnector): - """TCP connector. - - verify_ssl - Set to True to check ssl certifications. - fingerprint - Pass the binary sha256 - digest of the expected certificate in DER format to verify - that the certificate the server presents matches. See also - https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning - resolver - Enable DNS lookups and use this - resolver - use_dns_cache - Use memory cache for DNS lookups. - ttl_dns_cache - Max seconds having cached a DNS entry, None forever. - family - socket address family - local_addr - local tuple of (host, port) to bind socket to - - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - enable_cleanup_closed - Enables clean-up closed ssl transports. - Disabled by default. - loop - Optional event loop. 
- """ - - def __init__( - self, - *, - verify_ssl: bool = True, - fingerprint: Optional[bytes] = None, - use_dns_cache: bool = True, - ttl_dns_cache: Optional[int] = 10, - family: int = 0, - ssl_context: Optional[SSLContext] = None, - ssl: Union[None, bool, Fingerprint, SSLContext] = None, - local_addr: Optional[Tuple[str, int]] = None, - resolver: Optional[AbstractResolver] = None, - keepalive_timeout: Union[None, float, object] = sentinel, - force_close: bool = False, - limit: int = 100, - limit_per_host: int = 0, - enable_cleanup_closed: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - ): - super().__init__( - keepalive_timeout=keepalive_timeout, - force_close=force_close, - limit=limit, - limit_per_host=limit_per_host, - enable_cleanup_closed=enable_cleanup_closed, - loop=loop, - ) - - self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - if resolver is None: - resolver = DefaultResolver(loop=self._loop) - self._resolver = resolver - - self._use_dns_cache = use_dns_cache - self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) - self._throttle_dns_events = ( - {} - ) # type: Dict[Tuple[str, int], EventResultOrError] - self._family = family - self._local_addr = local_addr - - def close(self) -> Awaitable[None]: - """Close all ongoing DNS calls.""" - for ev in self._throttle_dns_events.values(): - ev.cancel() - - return super().close() - - @property - def family(self) -> int: - """Socket family like AF_INET.""" - return self._family - - @property - def use_dns_cache(self) -> bool: - """True if local DNS caching is enabled.""" - return self._use_dns_cache - - def clear_dns_cache( - self, host: Optional[str] = None, port: Optional[int] = None - ) -> None: - """Remove specified host/port or clear all dns local cache.""" - if host is not None and port is not None: - self._cached_hosts.remove((host, port)) - elif host is not None or port is not None: - raise ValueError("either both host and port " "or none of them are allowed") - else: - self._cached_hosts.clear() - - async def _resolve_host( - self, host: str, port: int, traces: Optional[List["Trace"]] = None - ) -> List[Dict[str, Any]]: - if is_ip_address(host): - return [ - { - "hostname": host, - "host": host, - "port": port, - "family": self._family, - "proto": 0, - "flags": 0, - } - ] - - if not self._use_dns_cache: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - res = await self._resolver.resolve(host, port, family=self._family) - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_end(host) - - return res - - key = (host, port) - - if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): - # get result early, before any await (#4014) - result = self._cached_hosts.next_addrs(key) - - if traces: - for trace in traces: - await trace.send_dns_cache_hit(host) - return result - - if key in self._throttle_dns_events: - # get event early, before any await (#4014) - event = self._throttle_dns_events[key] - if traces: - for trace in traces: - await trace.send_dns_cache_hit(host) - await event.wait() - else: - # update dict early, before any await (#4014) - self._throttle_dns_events[key] = EventResultOrError(self._loop) - if traces: - for trace in traces: - await trace.send_dns_cache_miss(host) - try: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - addrs = await self._resolver.resolve(host, port, family=self._family) - if traces: - for trace in traces: - await 
trace.send_dns_resolvehost_end(host) - - self._cached_hosts.add(key, addrs) - self._throttle_dns_events[key].set() - except BaseException as e: - # any DNS exception, independently of the implementation - # is set for the waiters to raise the same exception. - self._throttle_dns_events[key].set(exc=e) - raise - finally: - self._throttle_dns_events.pop(key) - - return self._cached_hosts.next_addrs(key) - - async def _create_connection( - self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - """Create connection. - - Has same keyword arguments as BaseEventLoop.create_connection. - """ - if req.proxy: - _, proto = await self._create_proxy_connection(req, traces, timeout) - else: - _, proto = await self._create_direct_connection(req, traces, timeout) - - return proto - - @staticmethod - @functools.lru_cache(None) - def _make_ssl_context(verified: bool) -> SSLContext: - if verified: - return ssl.create_default_context() - else: - sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - try: - sslcontext.options |= ssl.OP_NO_COMPRESSION - except AttributeError as attr_err: - warnings.warn( - "{!s}: The Python interpreter is compiled " - "against OpenSSL < 1.0.0. Ref: " - "https://docs.python.org/3/library/ssl.html" - "#ssl.OP_NO_COMPRESSION".format(attr_err), - ) - sslcontext.set_default_verify_paths() - return sslcontext - - def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]: - """Logic to get the correct SSL context - - 0. if req.ssl is false, return None - - 1. if ssl_context is specified in req, use it - 2. if _ssl_context is specified in self, use it - 3. otherwise: - 1. if verify_ssl is not specified in req, use self.ssl_context - (will generate a default context according to self.verify_ssl) - 2. if verify_ssl is True in req, generate a default SSL context - 3. 
if verify_ssl is False in req, generate a SSL context that - won't verify - """ - if req.is_ssl(): - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - sslcontext = req.ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not None: - # not verified or fingerprinted - return self._make_ssl_context(False) - sslcontext = self._ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not None: - # not verified or fingerprinted - return self._make_ssl_context(False) - return self._make_ssl_context(True) - else: - return None - - def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]: - ret = req.ssl - if isinstance(ret, Fingerprint): - return ret - ret = self._ssl - if isinstance(ret, Fingerprint): - return ret - return None - - async def _wrap_create_connection( - self, - *args: Any, - req: "ClientRequest", - timeout: "ClientTimeout", - client_error: Type[Exception] = ClientConnectorError, - **kwargs: Any, - ) -> Tuple[asyncio.Transport, ResponseHandler]: - try: - with CeilTimeout(timeout.sock_connect): - return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa - except cert_errors as exc: - raise ClientConnectorCertificateError(req.connection_key, exc) from exc - except ssl_errors as exc: - raise ClientConnectorSSLError(req.connection_key, exc) from exc - except OSError as exc: - raise client_error(req.connection_key, exc) from exc - - async def _create_direct_connection( - self, - req: "ClientRequest", - traces: List["Trace"], - timeout: "ClientTimeout", - *, - client_error: Type[Exception] = ClientConnectorError, - ) -> Tuple[asyncio.Transport, ResponseHandler]: - sslcontext = self._get_ssl_context(req) - fingerprint = self._get_fingerprint(req) - - host = req.url.raw_host - assert host is not None - port = req.port - assert port is not None - host_resolved = asyncio.ensure_future( - self._resolve_host(host, port, traces=traces), loop=self._loop - ) - try: - # Cancelling this lookup should not cancel the underlying lookup - # or else the cancel event will get broadcast to all the waiters - # across all connections. 
- hosts = await asyncio.shield(host_resolved) - except asyncio.CancelledError: - - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: - with suppress(Exception, asyncio.CancelledError): - fut.result() - - host_resolved.add_done_callback(drop_exception) - raise - except OSError as exc: - # in case of proxy it is not ClientProxyConnectionError - # it is problem of resolving proxy ip itself - raise ClientConnectorError(req.connection_key, exc) from exc - - last_exc = None # type: Optional[Exception] - - for hinfo in hosts: - host = hinfo["host"] - port = hinfo["port"] - - try: - transp, proto = await self._wrap_create_connection( - self._factory, - host, - port, - timeout=timeout, - ssl=sslcontext, - family=hinfo["family"], - proto=hinfo["proto"], - flags=hinfo["flags"], - server_hostname=hinfo["hostname"] if sslcontext else None, - local_addr=self._local_addr, - req=req, - client_error=client_error, - ) - except ClientConnectorError as exc: - last_exc = exc - continue - - if req.is_ssl() and fingerprint: - try: - fingerprint.check(transp) - except ServerFingerprintMismatch as exc: - transp.close() - if not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transp) - last_exc = exc - continue - - return transp, proto - else: - assert last_exc is not None - raise last_exc - - async def _create_proxy_connection( - self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" - ) -> Tuple[asyncio.Transport, ResponseHandler]: - headers = {} # type: Dict[str, str] - if req.proxy_headers is not None: - headers = req.proxy_headers # type: ignore - headers[hdrs.HOST] = req.headers[hdrs.HOST] - - url = req.proxy - assert url is not None - proxy_req = ClientRequest( - hdrs.METH_GET, - url, - headers=headers, - auth=req.proxy_auth, - loop=self._loop, - ssl=req.ssl, - ) - - # create connection to proxy server - transport, proto = await self._create_direct_connection( - proxy_req, [], timeout, client_error=ClientProxyConnectionError - ) - - # Many HTTP proxies has buggy keepalive support. Let's not - # reuse connection but close it after processing every - # response. 
- proto.force_close() - - auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) - if auth is not None: - if not req.is_ssl(): - req.headers[hdrs.PROXY_AUTHORIZATION] = auth - else: - proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth - - if req.is_ssl(): - sslcontext = self._get_ssl_context(req) - # For HTTPS requests over HTTP proxy - # we must notify proxy to tunnel connection - # so we send CONNECT command: - # CONNECT www.python.org:443 HTTP/1.1 - # Host: www.python.org - # - # next we must do TLS handshake and so on - # to do this we must wrap raw socket into secure one - # asyncio handles this perfectly - proxy_req.method = hdrs.METH_CONNECT - proxy_req.url = req.url - key = attr.evolve( - req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None - ) - conn = Connection(self, key, proto, self._loop) - proxy_resp = await proxy_req.send(conn) - try: - protocol = conn._protocol - assert protocol is not None - protocol.set_response_params() - resp = await proxy_resp.start(conn) - except BaseException: - proxy_resp.close() - conn.close() - raise - else: - conn._protocol = None - conn._transport = None - try: - if resp.status != 200: - message = resp.reason - if message is None: - message = RESPONSES[resp.status][0] - raise ClientHttpProxyError( - proxy_resp.request_info, - resp.history, - status=resp.status, - message=message, - headers=resp.headers, - ) - rawsock = transport.get_extra_info("socket", default=None) - if rawsock is None: - raise RuntimeError("Transport does not expose socket instance") - # Duplicate the socket, so now we can close proxy transport - rawsock = rawsock.dup() - finally: - transport.close() - - transport, proto = await self._wrap_create_connection( - self._factory, - timeout=timeout, - ssl=sslcontext, - sock=rawsock, - server_hostname=req.host, - req=req, - ) - finally: - proxy_resp.close() - - return transport, proto - - -class UnixConnector(BaseConnector): - """Unix socket connector. - - path - Unix socket path. - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - loop - Optional event loop. - """ - - def __init__( - self, - path: str, - force_close: bool = False, - keepalive_timeout: Union[object, float, None] = sentinel, - limit: int = 100, - limit_per_host: int = 0, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - super().__init__( - force_close=force_close, - keepalive_timeout=keepalive_timeout, - limit=limit, - limit_per_host=limit_per_host, - loop=loop, - ) - self._path = path - - @property - def path(self) -> str: - """Path to unix socket.""" - return self._path - - async def _create_connection( - self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - try: - with CeilTimeout(timeout.sock_connect): - _, proto = await self._loop.create_unix_connection( - self._factory, self._path - ) - except OSError as exc: - raise ClientConnectorError(req.connection_key, exc) from exc - - return cast(ResponseHandler, proto) - - -class NamedPipeConnector(BaseConnector): - """Named pipe connector. - - Only supported by the proactor event loop. - See also: https://docs.python.org/3.7/library/asyncio-eventloop.html - - path - Windows named pipe path. - keepalive_timeout - (optional) Keep-alive timeout. 
- force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - loop - Optional event loop. - """ - - def __init__( - self, - path: str, - force_close: bool = False, - keepalive_timeout: Union[object, float, None] = sentinel, - limit: int = 100, - limit_per_host: int = 0, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - super().__init__( - force_close=force_close, - keepalive_timeout=keepalive_timeout, - limit=limit, - limit_per_host=limit_per_host, - loop=loop, - ) - if not isinstance(self._loop, asyncio.ProactorEventLoop): # type: ignore - raise RuntimeError( - "Named Pipes only available in proactor " "loop under windows" - ) - self._path = path - - @property - def path(self) -> str: - """Path to the named pipe.""" - return self._path - - async def _create_connection( - self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - try: - with CeilTimeout(timeout.sock_connect): - _, proto = await self._loop.create_pipe_connection( # type: ignore - self._factory, self._path - ) - # the drain is required so that the connection_made is called - # and transport is set otherwise it is not set before the - # `assert conn.transport is not None` - # in client.py's _request method - await asyncio.sleep(0) - # other option is to manually set transport like - # `proto.transport = trans` - except OSError as exc: - raise ClientConnectorError(req.connection_key, exc) from exc - - return cast(ResponseHandler, proto) diff --git a/third_party/python/aiohttp/aiohttp/cookiejar.py b/third_party/python/aiohttp/aiohttp/cookiejar.py deleted file mode 100644 index b6b59d628945..000000000000 --- a/third_party/python/aiohttp/aiohttp/cookiejar.py +++ /dev/null @@ -1,382 +0,0 @@ -import asyncio -import datetime -import os # noqa -import pathlib -import pickle -import re -from collections import defaultdict -from http.cookies import BaseCookie, Morsel, SimpleCookie -from typing import ( # noqa - DefaultDict, - Dict, - Iterable, - Iterator, - Mapping, - Optional, - Set, - Tuple, - Union, - cast, -) - -from yarl import URL - -from .abc import AbstractCookieJar -from .helpers import is_ip_address, next_whole_second -from .typedefs import LooseCookies, PathLike - -__all__ = ("CookieJar", "DummyCookieJar") - - -CookieItem = Union[str, "Morsel[str]"] - - -class CookieJar(AbstractCookieJar): - """Implements cookie storage adhering to RFC 6265.""" - - DATE_TOKENS_RE = re.compile( - r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" - r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)" - ) - - DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") - - DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") - - DATE_MONTH_RE = re.compile( - "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", - re.I, - ) - - DATE_YEAR_RE = re.compile(r"(\d{2,4})") - - MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc) - - MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1) - - def __init__( - self, - *, - unsafe: bool = False, - quote_cookie: bool = True, - loop: Optional[asyncio.AbstractEventLoop] = None - ) -> None: - super().__init__(loop=loop) - self._cookies = defaultdict( - SimpleCookie - ) # type: DefaultDict[str, SimpleCookie[str]] - self._host_only_cookies = set() # type: Set[Tuple[str, str]] - self._unsafe = unsafe - self._quote_cookie = quote_cookie - 
self._next_expiration = next_whole_second() - self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime] - # #4515: datetime.max may not be representable on 32-bit platforms - self._max_time = self.MAX_TIME - try: - self._max_time.timestamp() - except OverflowError: - self._max_time = self.MAX_32BIT_TIME - - def save(self, file_path: PathLike) -> None: - file_path = pathlib.Path(file_path) - with file_path.open(mode="wb") as f: - pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) - - def load(self, file_path: PathLike) -> None: - file_path = pathlib.Path(file_path) - with file_path.open(mode="rb") as f: - self._cookies = pickle.load(f) - - def clear(self) -> None: - self._cookies.clear() - self._host_only_cookies.clear() - self._next_expiration = next_whole_second() - self._expirations.clear() - - def __iter__(self) -> "Iterator[Morsel[str]]": - self._do_expiration() - for val in self._cookies.values(): - yield from val.values() - - def __len__(self) -> int: - return sum(1 for i in self) - - def _do_expiration(self) -> None: - now = datetime.datetime.now(datetime.timezone.utc) - if self._next_expiration > now: - return - if not self._expirations: - return - next_expiration = self._max_time - to_del = [] - cookies = self._cookies - expirations = self._expirations - for (domain, name), when in expirations.items(): - if when <= now: - cookies[domain].pop(name, None) - to_del.append((domain, name)) - self._host_only_cookies.discard((domain, name)) - else: - next_expiration = min(next_expiration, when) - for key in to_del: - del expirations[key] - - try: - self._next_expiration = next_expiration.replace( - microsecond=0 - ) + datetime.timedelta(seconds=1) - except OverflowError: - self._next_expiration = self._max_time - - def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None: - self._next_expiration = min(self._next_expiration, when) - self._expirations[(domain, name)] = when - - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - """Update cookies.""" - hostname = response_url.raw_host - - if not self._unsafe and is_ip_address(hostname): - # Don't accept cookies from IPs - return - - if isinstance(cookies, Mapping): - cookies = cookies.items() - - for name, cookie in cookies: - if not isinstance(cookie, Morsel): - tmp = SimpleCookie() # type: SimpleCookie[str] - tmp[name] = cookie # type: ignore - cookie = tmp[name] - - domain = cookie["domain"] - - # ignore domains with trailing dots - if domain.endswith("."): - domain = "" - del cookie["domain"] - - if not domain and hostname is not None: - # Set the cookie's domain to the response hostname - # and set its host-only-flag - self._host_only_cookies.add((hostname, name)) - domain = cookie["domain"] = hostname - - if domain.startswith("."): - # Remove leading dot - domain = domain[1:] - cookie["domain"] = domain - - if hostname and not self._is_domain_match(domain, hostname): - # Setting cookies for different domains is not allowed - continue - - path = cookie["path"] - if not path or not path.startswith("/"): - # Set the cookie's path to the response path - path = response_url.path - if not path.startswith("/"): - path = "/" - else: - # Cut everything from the last slash to the end - path = "/" + path[1 : path.rfind("/")] - cookie["path"] = path - - max_age = cookie["max-age"] - if max_age: - try: - delta_seconds = int(max_age) - try: - max_age_expiration = datetime.datetime.now( - datetime.timezone.utc - ) + datetime.timedelta(seconds=delta_seconds) - except 
OverflowError: - max_age_expiration = self._max_time - self._expire_cookie(max_age_expiration, domain, name) - except ValueError: - cookie["max-age"] = "" - - else: - expires = cookie["expires"] - if expires: - expire_time = self._parse_date(expires) - if expire_time: - self._expire_cookie(expire_time, domain, name) - else: - cookie["expires"] = "" - - self._cookies[domain][name] = cookie - - self._do_expiration() - - def filter_cookies( - self, request_url: URL = URL() - ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]: - """Returns this jar's cookies filtered by their attributes.""" - self._do_expiration() - request_url = URL(request_url) - filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = ( - SimpleCookie() if self._quote_cookie else BaseCookie() - ) - hostname = request_url.raw_host or "" - is_not_secure = request_url.scheme not in ("https", "wss") - - for cookie in self: - name = cookie.key - domain = cookie["domain"] - - # Send shared cookies - if not domain: - filtered[name] = cookie.value - continue - - if not self._unsafe and is_ip_address(hostname): - continue - - if (domain, name) in self._host_only_cookies: - if domain != hostname: - continue - elif not self._is_domain_match(domain, hostname): - continue - - if not self._is_path_match(request_url.path, cookie["path"]): - continue - - if is_not_secure and cookie["secure"]: - continue - - # It's critical we use the Morsel so the coded_value - # (based on cookie version) is preserved - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) - filtered[name] = mrsl_val - - return filtered - - @staticmethod - def _is_domain_match(domain: str, hostname: str) -> bool: - """Implements domain matching adhering to RFC 6265.""" - if hostname == domain: - return True - - if not hostname.endswith(domain): - return False - - non_matching = hostname[: -len(domain)] - - if not non_matching.endswith("."): - return False - - return not is_ip_address(hostname) - - @staticmethod - def _is_path_match(req_path: str, cookie_path: str) -> bool: - """Implements path matching adhering to RFC 6265.""" - if not req_path.startswith("/"): - req_path = "/" - - if req_path == cookie_path: - return True - - if not req_path.startswith(cookie_path): - return False - - if cookie_path.endswith("/"): - return True - - non_matching = req_path[len(cookie_path) :] - - return non_matching.startswith("/") - - @classmethod - def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]: - """Implements date string parsing adhering to RFC 6265.""" - if not date_str: - return None - - found_time = False - found_day = False - found_month = False - found_year = False - - hour = minute = second = 0 - day = 0 - month = 0 - year = 0 - - for token_match in cls.DATE_TOKENS_RE.finditer(date_str): - - token = token_match.group("token") - - if not found_time: - time_match = cls.DATE_HMS_TIME_RE.match(token) - if time_match: - found_time = True - hour, minute, second = [int(s) for s in time_match.groups()] - continue - - if not found_day: - day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) - if day_match: - found_day = True - day = int(day_match.group()) - continue - - if not found_month: - month_match = cls.DATE_MONTH_RE.match(token) - if month_match: - found_month = True - assert month_match.lastindex is not None - month = month_match.lastindex - continue - - if not found_year: - year_match = cls.DATE_YEAR_RE.match(token) - if year_match: - found_year = True - year = int(year_match.group()) - - if 
70 <= year <= 99: - year += 1900 - elif 0 <= year <= 69: - year += 2000 - - if False in (found_day, found_month, found_year, found_time): - return None - - if not 1 <= day <= 31: - return None - - if year < 1601 or hour > 23 or minute > 59 or second > 59: - return None - - return datetime.datetime( - year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc - ) - - -class DummyCookieJar(AbstractCookieJar): - """Implements a dummy cookie storage. - - It can be used with the ClientSession when no cookie processing is needed. - - """ - - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - super().__init__(loop=loop) - - def __iter__(self) -> "Iterator[Morsel[str]]": - while False: - yield None - - def __len__(self) -> int: - return 0 - - def clear(self) -> None: - pass - - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - pass - - def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": - return SimpleCookie() diff --git a/third_party/python/aiohttp/aiohttp/formdata.py b/third_party/python/aiohttp/aiohttp/formdata.py deleted file mode 100644 index 900716b72a68..000000000000 --- a/third_party/python/aiohttp/aiohttp/formdata.py +++ /dev/null @@ -1,170 +0,0 @@ -import io -from typing import Any, Iterable, List, Optional -from urllib.parse import urlencode - -from multidict import MultiDict, MultiDictProxy - -from . import hdrs, multipart, payload -from .helpers import guess_filename -from .payload import Payload - -__all__ = ("FormData",) - - -class FormData: - """Helper class for multipart/form-data and - application/x-www-form-urlencoded body generation.""" - - def __init__( - self, - fields: Iterable[Any] = (), - quote_fields: bool = True, - charset: Optional[str] = None, - ) -> None: - self._writer = multipart.MultipartWriter("form-data") - self._fields = [] # type: List[Any] - self._is_multipart = False - self._is_processed = False - self._quote_fields = quote_fields - self._charset = charset - - if isinstance(fields, dict): - fields = list(fields.items()) - elif not isinstance(fields, (list, tuple)): - fields = (fields,) - self.add_fields(*fields) - - @property - def is_multipart(self) -> bool: - return self._is_multipart - - def add_field( - self, - name: str, - value: Any, - *, - content_type: Optional[str] = None, - filename: Optional[str] = None, - content_transfer_encoding: Optional[str] = None - ) -> None: - - if isinstance(value, io.IOBase): - self._is_multipart = True - elif isinstance(value, (bytes, bytearray, memoryview)): - if filename is None and content_transfer_encoding is None: - filename = name - - type_options = MultiDict({"name": name}) # type: MultiDict[str] - if filename is not None and not isinstance(filename, str): - raise TypeError( - "filename must be an instance of str. " "Got: %s" % filename - ) - if filename is None and isinstance(value, io.IOBase): - filename = guess_filename(value, name) - if filename is not None: - type_options["filename"] = filename - self._is_multipart = True - - headers = {} - if content_type is not None: - if not isinstance(content_type, str): - raise TypeError( - "content_type must be an instance of str. " "Got: %s" % content_type - ) - headers[hdrs.CONTENT_TYPE] = content_type - self._is_multipart = True - if content_transfer_encoding is not None: - if not isinstance(content_transfer_encoding, str): - raise TypeError( - "content_transfer_encoding must be an instance" - " of str. 
Got: %s" % content_transfer_encoding - ) - headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding - self._is_multipart = True - - self._fields.append((type_options, headers, value)) - - def add_fields(self, *fields: Any) -> None: - to_add = list(fields) - - while to_add: - rec = to_add.pop(0) - - if isinstance(rec, io.IOBase): - k = guess_filename(rec, "unknown") - self.add_field(k, rec) # type: ignore - - elif isinstance(rec, (MultiDictProxy, MultiDict)): - to_add.extend(rec.items()) - - elif isinstance(rec, (list, tuple)) and len(rec) == 2: - k, fp = rec - self.add_field(k, fp) # type: ignore - - else: - raise TypeError( - "Only io.IOBase, multidict and (name, file) " - "pairs allowed, use .add_field() for passing " - "more complex parameters, got {!r}".format(rec) - ) - - def _gen_form_urlencoded(self) -> payload.BytesPayload: - # form data (x-www-form-urlencoded) - data = [] - for type_options, _, value in self._fields: - data.append((type_options["name"], value)) - - charset = self._charset if self._charset is not None else "utf-8" - - if charset == "utf-8": - content_type = "application/x-www-form-urlencoded" - else: - content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset - - return payload.BytesPayload( - urlencode(data, doseq=True, encoding=charset).encode(), - content_type=content_type, - ) - - def _gen_form_data(self) -> multipart.MultipartWriter: - """Encode a list of fields using the multipart/form-data MIME format""" - if self._is_processed: - raise RuntimeError("Form data has been processed already") - for dispparams, headers, value in self._fields: - try: - if hdrs.CONTENT_TYPE in headers: - part = payload.get_payload( - value, - content_type=headers[hdrs.CONTENT_TYPE], - headers=headers, - encoding=self._charset, - ) - else: - part = payload.get_payload( - value, headers=headers, encoding=self._charset - ) - except Exception as exc: - raise TypeError( - "Can not serialize value type: %r\n " - "headers: %r\n value: %r" % (type(value), headers, value) - ) from exc - - if dispparams: - part.set_content_disposition( - "form-data", quote_fields=self._quote_fields, **dispparams - ) - # FIXME cgi.FieldStorage doesn't likes body parts with - # Content-Length which were sent via chunked transfer encoding - assert part.headers is not None - part.headers.popall(hdrs.CONTENT_LENGTH, None) - - self._writer.append_payload(part) - - self._is_processed = True - return self._writer - - def __call__(self) -> Payload: - if self._is_multipart: - return self._gen_form_data() - else: - return self._gen_form_urlencoded() diff --git a/third_party/python/aiohttp/aiohttp/frozenlist.py b/third_party/python/aiohttp/aiohttp/frozenlist.py deleted file mode 100644 index 46b26108cfa9..000000000000 --- a/third_party/python/aiohttp/aiohttp/frozenlist.py +++ /dev/null @@ -1,72 +0,0 @@ -from collections.abc import MutableSequence -from functools import total_ordering - -from .helpers import NO_EXTENSIONS - - -@total_ordering -class FrozenList(MutableSequence): - - __slots__ = ("_frozen", "_items") - - def __init__(self, items=None): - self._frozen = False - if items is not None: - items = list(items) - else: - items = [] - self._items = items - - @property - def frozen(self): - return self._frozen - - def freeze(self): - self._frozen = True - - def __getitem__(self, index): - return self._items[index] - - def __setitem__(self, index, value): - if self._frozen: - raise RuntimeError("Cannot modify frozen list.") - self._items[index] = value - - def __delitem__(self, index): - 
if self._frozen: - raise RuntimeError("Cannot modify frozen list.") - del self._items[index] - - def __len__(self): - return self._items.__len__() - - def __iter__(self): - return self._items.__iter__() - - def __reversed__(self): - return self._items.__reversed__() - - def __eq__(self, other): - return list(self) == other - - def __le__(self, other): - return list(self) <= other - - def insert(self, pos, item): - if self._frozen: - raise RuntimeError("Cannot modify frozen list.") - self._items.insert(pos, item) - - def __repr__(self): - return f"" - - -PyFrozenList = FrozenList - -try: - from aiohttp._frozenlist import FrozenList as CFrozenList # type: ignore - - if not NO_EXTENSIONS: - FrozenList = CFrozenList # type: ignore -except ImportError: # pragma: no cover - pass diff --git a/third_party/python/aiohttp/aiohttp/frozenlist.pyi b/third_party/python/aiohttp/aiohttp/frozenlist.pyi deleted file mode 100644 index 72ab086715bb..000000000000 --- a/third_party/python/aiohttp/aiohttp/frozenlist.pyi +++ /dev/null @@ -1,46 +0,0 @@ -from typing import ( - Generic, - Iterable, - Iterator, - List, - MutableSequence, - Optional, - TypeVar, - Union, - overload, -) - -_T = TypeVar("_T") -_Arg = Union[List[_T], Iterable[_T]] - -class FrozenList(MutableSequence[_T], Generic[_T]): - def __init__(self, items: Optional[_Arg[_T]] = ...) -> None: ... - @property - def frozen(self) -> bool: ... - def freeze(self) -> None: ... - @overload - def __getitem__(self, i: int) -> _T: ... - @overload - def __getitem__(self, s: slice) -> FrozenList[_T]: ... - @overload - def __setitem__(self, i: int, o: _T) -> None: ... - @overload - def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... - @overload - def __delitem__(self, i: int) -> None: ... - @overload - def __delitem__(self, i: slice) -> None: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[_T]: ... - def __reversed__(self) -> Iterator[_T]: ... - def __eq__(self, other: object) -> bool: ... - def __le__(self, other: FrozenList[_T]) -> bool: ... - def __ne__(self, other: object) -> bool: ... - def __lt__(self, other: FrozenList[_T]) -> bool: ... - def __ge__(self, other: FrozenList[_T]) -> bool: ... - def __gt__(self, other: FrozenList[_T]) -> bool: ... - def insert(self, pos: int, item: _T) -> None: ... - def __repr__(self) -> str: ... 
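FrozenList, whose implementation ends just above, behaves like a normal list until freeze() is called, after which every mutation raises RuntimeError (aiohttp uses it for callback lists). A behavioural sketch, assuming aiohttp 3.7.x is still importable:

from aiohttp.frozenlist import FrozenList  # module deleted in this patch

callbacks = FrozenList()
callbacks.append(print)        # mutation is allowed while unfrozen
callbacks.insert(0, repr)
callbacks.freeze()             # after this point the list is read-only

try:
    callbacks.append(len)      # any further mutation is rejected
except RuntimeError as exc:
    print("blocked:", exc)     # "Cannot modify frozen list."

print(list(callbacks), callbacks.frozen)   # reads and iteration still work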
- -# types for C accelerators are the same -CFrozenList = PyFrozenList = FrozenList diff --git a/third_party/python/aiohttp/aiohttp/hdrs.py b/third_party/python/aiohttp/aiohttp/hdrs.py deleted file mode 100644 index f04a5457f9fe..000000000000 --- a/third_party/python/aiohttp/aiohttp/hdrs.py +++ /dev/null @@ -1,108 +0,0 @@ -"""HTTP Headers constants.""" - -# After changing the file content call ./tools/gen.py -# to regenerate the headers parser - -from multidict import istr - -METH_ANY = "*" -METH_CONNECT = "CONNECT" -METH_HEAD = "HEAD" -METH_GET = "GET" -METH_DELETE = "DELETE" -METH_OPTIONS = "OPTIONS" -METH_PATCH = "PATCH" -METH_POST = "POST" -METH_PUT = "PUT" -METH_TRACE = "TRACE" - -METH_ALL = { - METH_CONNECT, - METH_HEAD, - METH_GET, - METH_DELETE, - METH_OPTIONS, - METH_PATCH, - METH_POST, - METH_PUT, - METH_TRACE, -} - - -ACCEPT = istr("Accept") -ACCEPT_CHARSET = istr("Accept-Charset") -ACCEPT_ENCODING = istr("Accept-Encoding") -ACCEPT_LANGUAGE = istr("Accept-Language") -ACCEPT_RANGES = istr("Accept-Ranges") -ACCESS_CONTROL_MAX_AGE = istr("Access-Control-Max-Age") -ACCESS_CONTROL_ALLOW_CREDENTIALS = istr("Access-Control-Allow-Credentials") -ACCESS_CONTROL_ALLOW_HEADERS = istr("Access-Control-Allow-Headers") -ACCESS_CONTROL_ALLOW_METHODS = istr("Access-Control-Allow-Methods") -ACCESS_CONTROL_ALLOW_ORIGIN = istr("Access-Control-Allow-Origin") -ACCESS_CONTROL_EXPOSE_HEADERS = istr("Access-Control-Expose-Headers") -ACCESS_CONTROL_REQUEST_HEADERS = istr("Access-Control-Request-Headers") -ACCESS_CONTROL_REQUEST_METHOD = istr("Access-Control-Request-Method") -AGE = istr("Age") -ALLOW = istr("Allow") -AUTHORIZATION = istr("Authorization") -CACHE_CONTROL = istr("Cache-Control") -CONNECTION = istr("Connection") -CONTENT_DISPOSITION = istr("Content-Disposition") -CONTENT_ENCODING = istr("Content-Encoding") -CONTENT_LANGUAGE = istr("Content-Language") -CONTENT_LENGTH = istr("Content-Length") -CONTENT_LOCATION = istr("Content-Location") -CONTENT_MD5 = istr("Content-MD5") -CONTENT_RANGE = istr("Content-Range") -CONTENT_TRANSFER_ENCODING = istr("Content-Transfer-Encoding") -CONTENT_TYPE = istr("Content-Type") -COOKIE = istr("Cookie") -DATE = istr("Date") -DESTINATION = istr("Destination") -DIGEST = istr("Digest") -ETAG = istr("Etag") -EXPECT = istr("Expect") -EXPIRES = istr("Expires") -FORWARDED = istr("Forwarded") -FROM = istr("From") -HOST = istr("Host") -IF_MATCH = istr("If-Match") -IF_MODIFIED_SINCE = istr("If-Modified-Since") -IF_NONE_MATCH = istr("If-None-Match") -IF_RANGE = istr("If-Range") -IF_UNMODIFIED_SINCE = istr("If-Unmodified-Since") -KEEP_ALIVE = istr("Keep-Alive") -LAST_EVENT_ID = istr("Last-Event-ID") -LAST_MODIFIED = istr("Last-Modified") -LINK = istr("Link") -LOCATION = istr("Location") -MAX_FORWARDS = istr("Max-Forwards") -ORIGIN = istr("Origin") -PRAGMA = istr("Pragma") -PROXY_AUTHENTICATE = istr("Proxy-Authenticate") -PROXY_AUTHORIZATION = istr("Proxy-Authorization") -RANGE = istr("Range") -REFERER = istr("Referer") -RETRY_AFTER = istr("Retry-After") -SEC_WEBSOCKET_ACCEPT = istr("Sec-WebSocket-Accept") -SEC_WEBSOCKET_VERSION = istr("Sec-WebSocket-Version") -SEC_WEBSOCKET_PROTOCOL = istr("Sec-WebSocket-Protocol") -SEC_WEBSOCKET_EXTENSIONS = istr("Sec-WebSocket-Extensions") -SEC_WEBSOCKET_KEY = istr("Sec-WebSocket-Key") -SEC_WEBSOCKET_KEY1 = istr("Sec-WebSocket-Key1") -SERVER = istr("Server") -SET_COOKIE = istr("Set-Cookie") -TE = istr("TE") -TRAILER = istr("Trailer") -TRANSFER_ENCODING = istr("Transfer-Encoding") -UPGRADE = istr("Upgrade") -URI = istr("URI") -USER_AGENT = 
istr("User-Agent") -VARY = istr("Vary") -VIA = istr("Via") -WANT_DIGEST = istr("Want-Digest") -WARNING = istr("Warning") -WWW_AUTHENTICATE = istr("WWW-Authenticate") -X_FORWARDED_FOR = istr("X-Forwarded-For") -X_FORWARDED_HOST = istr("X-Forwarded-Host") -X_FORWARDED_PROTO = istr("X-Forwarded-Proto") diff --git a/third_party/python/aiohttp/aiohttp/helpers.py b/third_party/python/aiohttp/aiohttp/helpers.py deleted file mode 100644 index bbf5f1298fb7..000000000000 --- a/third_party/python/aiohttp/aiohttp/helpers.py +++ /dev/null @@ -1,780 +0,0 @@ -"""Various helper functions""" - -import asyncio -import base64 -import binascii -import cgi -import datetime -import functools -import inspect -import netrc -import os -import platform -import re -import sys -import time -import warnings -import weakref -from collections import namedtuple -from contextlib import suppress -from math import ceil -from pathlib import Path -from types import TracebackType -from typing import ( - Any, - Callable, - Dict, - Generator, - Generic, - Iterable, - Iterator, - List, - Mapping, - Optional, - Pattern, - Set, - Tuple, - Type, - TypeVar, - Union, - cast, -) -from urllib.parse import quote -from urllib.request import getproxies - -import async_timeout -import attr -from multidict import MultiDict, MultiDictProxy -from typing_extensions import Protocol -from yarl import URL - -from . import hdrs -from .log import client_logger, internal_logger -from .typedefs import PathLike # noqa - -__all__ = ("BasicAuth", "ChainMapProxy") - -PY_36 = sys.version_info >= (3, 6) -PY_37 = sys.version_info >= (3, 7) -PY_38 = sys.version_info >= (3, 8) - -if not PY_37: - import idna_ssl - - idna_ssl.patch_match_hostname() - -try: - from typing import ContextManager -except ImportError: - from typing_extensions import ContextManager - - -def all_tasks( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> Set["asyncio.Task[Any]"]: - tasks = list(asyncio.Task.all_tasks(loop)) - return {t for t in tasks if not t.done()} - - -if PY_37: - all_tasks = getattr(asyncio, "all_tasks") - - -_T = TypeVar("_T") -_S = TypeVar("_S") - - -sentinel = object() # type: Any -NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool - -# N.B. 
sys.flags.dev_mode is available on Python 3.7+, use getattr -# for compatibility with older versions -DEBUG = getattr(sys.flags, "dev_mode", False) or ( - not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) -) # type: bool - - -CHAR = {chr(i) for i in range(0, 128)} -CTL = {chr(i) for i in range(0, 32)} | { - chr(127), -} -SEPARATORS = { - "(", - ")", - "<", - ">", - "@", - ",", - ";", - ":", - "\\", - '"', - "/", - "[", - "]", - "?", - "=", - "{", - "}", - " ", - chr(9), -} -TOKEN = CHAR ^ CTL ^ SEPARATORS - - -class noop: - def __await__(self) -> Generator[None, None, None]: - yield - - -class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): - """Http basic authentication helper.""" - - def __new__( - cls, login: str, password: str = "", encoding: str = "latin1" - ) -> "BasicAuth": - if login is None: - raise ValueError("None is not allowed as login value") - - if password is None: - raise ValueError("None is not allowed as password value") - - if ":" in login: - raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') - - return super().__new__(cls, login, password, encoding) - - @classmethod - def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": - """Create a BasicAuth object from an Authorization HTTP header.""" - try: - auth_type, encoded_credentials = auth_header.split(" ", 1) - except ValueError: - raise ValueError("Could not parse authorization header.") - - if auth_type.lower() != "basic": - raise ValueError("Unknown authorization method %s" % auth_type) - - try: - decoded = base64.b64decode( - encoded_credentials.encode("ascii"), validate=True - ).decode(encoding) - except binascii.Error: - raise ValueError("Invalid base64 encoding.") - - try: - # RFC 2617 HTTP Authentication - # https://www.ietf.org/rfc/rfc2617.txt - # the colon must be present, but the username and password may be - # otherwise blank. - username, password = decoded.split(":", 1) - except ValueError: - raise ValueError("Invalid credentials.") - - return cls(username, password, encoding=encoding) - - @classmethod - def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: - """Create BasicAuth from url.""" - if not isinstance(url, URL): - raise TypeError("url should be yarl.URL instance") - if url.user is None: - return None - return cls(url.user, url.password or "", encoding=encoding) - - def encode(self) -> str: - """Encode credentials.""" - creds = (f"{self.login}:{self.password}").encode(self.encoding) - return "Basic %s" % base64.b64encode(creds).decode(self.encoding) - - -def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - auth = BasicAuth.from_url(url) - if auth is None: - return url, None - else: - return url.with_user(None), auth - - -def netrc_from_env() -> Optional[netrc.netrc]: - """Attempt to load the netrc file from the path specified by the env-var - NETRC or in the default location in the user's home directory. - - Returns None if it couldn't be found or fails to parse. 
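BasicAuth, defined just above and re-exported as aiohttp.BasicAuth, round-trips the Authorization header value: encode() produces the "Basic" credential string and decode() parses it back. A sketch with made-up credentials:

from aiohttp import BasicAuth   # the class defined in the helpers module above

auth = BasicAuth("alice", "s3cret")
header_value = auth.encode()                 # 'Basic YWxpY2U6czNjcmV0'
print(header_value)

decoded = BasicAuth.decode(header_value)     # parse an Authorization header back
assert decoded.login == "alice" and decoded.password == "s3cret"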
- """ - netrc_env = os.environ.get("NETRC") - - if netrc_env is not None: - netrc_path = Path(netrc_env) - else: - try: - home_dir = Path.home() - except RuntimeError as e: # pragma: no cover - # if pathlib can't resolve home, it may raise a RuntimeError - client_logger.debug( - "Could not resolve home directory when " - "trying to look for .netrc file: %s", - e, - ) - return None - - netrc_path = home_dir / ( - "_netrc" if platform.system() == "Windows" else ".netrc" - ) - - try: - return netrc.netrc(str(netrc_path)) - except netrc.NetrcParseError as e: - client_logger.warning("Could not parse .netrc file: %s", e) - except OSError as e: - # we couldn't read the file (doesn't exist, permissions, etc.) - if netrc_env or netrc_path.is_file(): - # only warn if the environment wanted us to load it, - # or it appears like the default file does actually exist - client_logger.warning("Could not read .netrc file: %s", e) - - return None - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ProxyInfo: - proxy: URL - proxy_auth: Optional[BasicAuth] - - -def proxies_from_env() -> Dict[str, ProxyInfo]: - proxy_urls = {k: URL(v) for k, v in getproxies().items() if k in ("http", "https")} - netrc_obj = netrc_from_env() - stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} - ret = {} - for proto, val in stripped.items(): - proxy, auth = val - if proxy.scheme == "https": - client_logger.warning("HTTPS proxies %s are not supported, ignoring", proxy) - continue - if netrc_obj and auth is None: - auth_from_netrc = None - if proxy.host is not None: - auth_from_netrc = netrc_obj.authenticators(proxy.host) - if auth_from_netrc is not None: - # auth_from_netrc is a (`user`, `account`, `password`) tuple, - # `user` and `account` both can be username, - # if `user` is None, use `account` - *logins, password = auth_from_netrc - login = logins[0] if logins[0] else logins[-1] - auth = BasicAuth(cast(str, login), cast(str, password)) - ret[proto] = ProxyInfo(proxy, auth) - return ret - - -def current_task( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> "Optional[asyncio.Task[Any]]": - if PY_37: - return asyncio.current_task(loop=loop) - else: - return asyncio.Task.current_task(loop=loop) - - -def get_running_loop( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> asyncio.AbstractEventLoop: - if loop is None: - loop = asyncio.get_event_loop() - if not loop.is_running(): - warnings.warn( - "The object should be created within an async function", - DeprecationWarning, - stacklevel=3, - ) - if loop.get_debug(): - internal_logger.warning( - "The object should be created within an async function", stack_info=True - ) - return loop - - -def isasyncgenfunction(obj: Any) -> bool: - func = getattr(inspect, "isasyncgenfunction", None) - if func is not None: - return func(obj) - else: - return False - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class MimeType: - type: str - subtype: str - suffix: str - parameters: "MultiDictProxy[str]" - - -@functools.lru_cache(maxsize=56) -def parse_mimetype(mimetype: str) -> MimeType: - """Parses a MIME type into its components. - - mimetype is a MIME type string. - - Returns a MimeType object. 
- - Example: - - >>> parse_mimetype('text/html; charset=utf-8') - MimeType(type='text', subtype='html', suffix='', - parameters={'charset': 'utf-8'}) - - """ - if not mimetype: - return MimeType( - type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) - ) - - parts = mimetype.split(";") - params = MultiDict() # type: MultiDict[str] - for item in parts[1:]: - if not item: - continue - key, value = cast( - Tuple[str, str], item.split("=", 1) if "=" in item else (item, "") - ) - params.add(key.lower().strip(), value.strip(' "')) - - fulltype = parts[0].strip().lower() - if fulltype == "*": - fulltype = "*/*" - - mtype, stype = ( - cast(Tuple[str, str], fulltype.split("/", 1)) - if "/" in fulltype - else (fulltype, "") - ) - stype, suffix = ( - cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "") - ) - - return MimeType( - type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) - ) - - -def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: - name = getattr(obj, "name", None) - if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": - return Path(name).name - return default - - -def content_disposition_header( - disptype: str, quote_fields: bool = True, **params: str -) -> str: - """Sets ``Content-Disposition`` header. - - disptype is a disposition type: inline, attachment, form-data. - Should be valid extension token (see RFC 2183) - - params is a dict with disposition params. - """ - if not disptype or not (TOKEN > set(disptype)): - raise ValueError("bad content disposition type {!r}" "".format(disptype)) - - value = disptype - if params: - lparams = [] - for key, val in params.items(): - if not key or not (TOKEN > set(key)): - raise ValueError( - "bad content disposition parameter" " {!r}={!r}".format(key, val) - ) - qval = quote(val, "") if quote_fields else val - lparams.append((key, '"%s"' % qval)) - if key == "filename": - lparams.append(("filename*", "utf-8''" + qval)) - sparams = "; ".join("=".join(pair) for pair in lparams) - value = "; ".join((value, sparams)) - return value - - -class _TSelf(Protocol): - _cache: Dict[str, Any] - - -class reify(Generic[_T]): - """Use as a class method decorator. It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. 
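The reify descriptor described above caches the first computed value in the instance's _cache dict so later attribute reads are plain lookups; on Python 3.8+ the stdlib functools.cached_property gives the same effect (storing in __dict__ instead). A minimal sketch of the idea:

from functools import cached_property   # stdlib analogue of the reify descriptor above

class Response:
    @cached_property
    def parsed_body(self):
        print("parsing once")           # the expensive work runs a single time
        return {"ok": True}

r = Response()
print(r.parsed_body)    # prints "parsing once", then {'ok': True}
print(r.parsed_body)    # served from the cache, no re-parse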
- - """ - - def __init__(self, wrapped: Callable[..., _T]) -> None: - self.wrapped = wrapped - self.__doc__ = wrapped.__doc__ - self.name = wrapped.__name__ - - def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T: - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise - - def __set__(self, inst: _TSelf, value: _T) -> None: - raise AttributeError("reified property is read-only") - - -reify_py = reify - -try: - from ._helpers import reify as reify_c - - if not NO_EXTENSIONS: - reify = reify_c # type: ignore -except ImportError: - pass - -_ipv4_pattern = ( - r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" -) -_ipv6_pattern = ( - r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" - r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" - r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" - r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" - r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" - r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" - r":|:(:[A-F0-9]{1,4}){7})$" -) -_ipv4_regex = re.compile(_ipv4_pattern) -_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) -_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) -_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) - - -def _is_ip_address( - regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] -) -> bool: - if host is None: - return False - if isinstance(host, str): - return bool(regex.match(host)) - elif isinstance(host, (bytes, bytearray, memoryview)): - return bool(regexb.match(host)) - else: - raise TypeError("{} [{}] is not a str or bytes".format(host, type(host))) - - -is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) -is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) - - -def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: - return is_ipv4_address(host) or is_ipv6_address(host) - - -def next_whole_second() -> datetime.datetime: - """Return current time rounded up to the next whole second.""" - return datetime.datetime.now(datetime.timezone.utc).replace( - microsecond=0 - ) + datetime.timedelta(seconds=0) - - -_cached_current_datetime = None # type: Optional[int] -_cached_formatted_datetime = "" - - -def rfc822_formatted_time() -> str: - global _cached_current_datetime - global _cached_formatted_datetime - - now = int(time.time()) - if now != _cached_current_datetime: - # Weekday and month names for HTTP date/time formatting; - # always English! - # Tuples are constants stored in codeobject! 
- _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") - _monthname = ( - "", # Dummy so we can use 1-based month numbers - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ) - - year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) - _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( - _weekdayname[wd], - day, - _monthname[month], - year, - hh, - mm, - ss, - ) - _cached_current_datetime = now - return _cached_formatted_datetime - - -def _weakref_handle(info): # type: ignore - ref, name = info - ob = ref() - if ob is not None: - with suppress(Exception): - getattr(ob, name)() - - -def weakref_handle(ob, name, timeout, loop): # type: ignore - if timeout is not None and timeout > 0: - when = loop.time() + timeout - if timeout >= 5: - when = ceil(when) - - return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) - - -def call_later(cb, timeout, loop): # type: ignore - if timeout is not None and timeout > 0: - when = loop.time() + timeout - if timeout > 5: - when = ceil(when) - return loop.call_at(when, cb) - - -class TimeoutHandle: - """ Timeout handle """ - - def __init__( - self, loop: asyncio.AbstractEventLoop, timeout: Optional[float] - ) -> None: - self._timeout = timeout - self._loop = loop - self._callbacks = ( - [] - ) # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]] - - def register( - self, callback: Callable[..., None], *args: Any, **kwargs: Any - ) -> None: - self._callbacks.append((callback, args, kwargs)) - - def close(self) -> None: - self._callbacks.clear() - - def start(self) -> Optional[asyncio.Handle]: - timeout = self._timeout - if timeout is not None and timeout > 0: - when = self._loop.time() + timeout - if timeout >= 5: - when = ceil(when) - return self._loop.call_at(when, self.__call__) - else: - return None - - def timer(self) -> "BaseTimerContext": - if self._timeout is not None and self._timeout > 0: - timer = TimerContext(self._loop) - self.register(timer.timeout) - return timer - else: - return TimerNoop() - - def __call__(self) -> None: - for cb, args, kwargs in self._callbacks: - with suppress(Exception): - cb(*args, **kwargs) - - self._callbacks.clear() - - -class BaseTimerContext(ContextManager["BaseTimerContext"]): - pass - - -class TimerNoop(BaseTimerContext): - def __enter__(self) -> BaseTimerContext: - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - return - - -class TimerContext(BaseTimerContext): - """ Low resolution timeout context manager """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._tasks = [] # type: List[asyncio.Task[Any]] - self._cancelled = False - - def __enter__(self) -> BaseTimerContext: - task = current_task(loop=self._loop) - - if task is None: - raise RuntimeError( - "Timeout context manager should be used " "inside a task" - ) - - if self._cancelled: - task.cancel() - raise asyncio.TimeoutError from None - - self._tasks.append(task) - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: - if self._tasks: - self._tasks.pop() - - if exc_type is asyncio.CancelledError and self._cancelled: - raise asyncio.TimeoutError from None - return None - - def timeout(self) -> None: - if not self._cancelled: - for task in set(self._tasks): 
- task.cancel() - - self._cancelled = True - - -class CeilTimeout(async_timeout.timeout): - def __enter__(self) -> async_timeout.timeout: - if self._timeout is not None: - self._task = current_task(loop=self._loop) - if self._task is None: - raise RuntimeError( - "Timeout context manager should be used inside a task" - ) - now = self._loop.time() - delay = self._timeout - when = now + delay - if delay > 5: - when = ceil(when) - self._cancel_handler = self._loop.call_at(when, self._cancel_task) - return self - - -class HeadersMixin: - - ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) - - _content_type = None # type: Optional[str] - _content_dict = None # type: Optional[Dict[str, str]] - _stored_content_type = sentinel - - def _parse_content_type(self, raw: str) -> None: - self._stored_content_type = raw - if raw is None: - # default value according to RFC 2616 - self._content_type = "application/octet-stream" - self._content_dict = {} - else: - self._content_type, self._content_dict = cgi.parse_header(raw) - - @property - def content_type(self) -> str: - """The value of content part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore - if self._stored_content_type != raw: - self._parse_content_type(raw) - return self._content_type # type: ignore - - @property - def charset(self) -> Optional[str]: - """The value of charset part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore - if self._stored_content_type != raw: - self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore - - @property - def content_length(self) -> Optional[int]: - """The value of Content-Length HTTP header.""" - content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore - - if content_length is not None: - return int(content_length) - else: - return None - - -def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: - if not fut.done(): - fut.set_result(result) - - -def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: - if not fut.done(): - fut.set_exception(exc) - - -class ChainMapProxy(Mapping[str, Any]): - __slots__ = ("_maps",) - - def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None: - self._maps = tuple(maps) - - def __init_subclass__(cls) -> None: - raise TypeError( - "Inheritance class {} from ChainMapProxy " - "is forbidden".format(cls.__name__) - ) - - def __getitem__(self, key: str) -> Any: - for mapping in self._maps: - try: - return mapping[key] - except KeyError: - pass - raise KeyError(key) - - def get(self, key: str, default: Any = None) -> Any: - return self[key] if key in self else default - - def __len__(self) -> int: - # reuses stored hash values if possible - return len(set().union(*self._maps)) # type: ignore - - def __iter__(self) -> Iterator[str]: - d = {} # type: Dict[str, Any] - for mapping in reversed(self._maps): - # reuses stored hash values if possible - d.update(mapping) - return iter(d) - - def __contains__(self, key: object) -> bool: - return any(key in m for m in self._maps) - - def __bool__(self) -> bool: - return any(self._maps) - - def __repr__(self) -> str: - content = ", ".join(map(repr, self._maps)) - return f"ChainMapProxy({content})" diff --git a/third_party/python/aiohttp/aiohttp/http.py b/third_party/python/aiohttp/aiohttp/http.py deleted file mode 100644 index 415ffbf563bc..000000000000 --- a/third_party/python/aiohttp/aiohttp/http.py +++ /dev/null @@ -1,72 +0,0 @@ -import http.server 
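ChainMapProxy above is in effect a read-only, non-subclassable collections.ChainMap: lookups walk the wrapped mappings in order and the first hit wins, which is handy for layering configuration mappings. The stdlib analogue, with made-up settings:

from collections import ChainMap

app_defaults = {"debug": False, "timeout": 30}
request_overrides = {"timeout": 5}

cfg = ChainMap(request_overrides, app_defaults)   # earlier maps shadow later ones
print(cfg["timeout"])        # 5, from request_overrides
print(cfg["debug"])          # False, falls through to app_defaults
print(len(cfg), list(cfg))   # union of keys, like ChainMapProxy.__len__/__iter__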
-import sys -from typing import Mapping, Tuple - -from . import __version__ -from .http_exceptions import HttpProcessingError as HttpProcessingError -from .http_parser import ( - HeadersParser as HeadersParser, - HttpParser as HttpParser, - HttpRequestParser as HttpRequestParser, - HttpResponseParser as HttpResponseParser, - RawRequestMessage as RawRequestMessage, - RawResponseMessage as RawResponseMessage, -) -from .http_websocket import ( - WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, - WS_KEY as WS_KEY, - WebSocketError as WebSocketError, - WebSocketReader as WebSocketReader, - WebSocketWriter as WebSocketWriter, - WSCloseCode as WSCloseCode, - WSMessage as WSMessage, - WSMsgType as WSMsgType, - ws_ext_gen as ws_ext_gen, - ws_ext_parse as ws_ext_parse, -) -from .http_writer import ( - HttpVersion as HttpVersion, - HttpVersion10 as HttpVersion10, - HttpVersion11 as HttpVersion11, - StreamWriter as StreamWriter, -) - -__all__ = ( - "HttpProcessingError", - "RESPONSES", - "SERVER_SOFTWARE", - # .http_writer - "StreamWriter", - "HttpVersion", - "HttpVersion10", - "HttpVersion11", - # .http_parser - "HeadersParser", - "HttpParser", - "HttpRequestParser", - "HttpResponseParser", - "RawRequestMessage", - "RawResponseMessage", - # .http_websocket - "WS_CLOSED_MESSAGE", - "WS_CLOSING_MESSAGE", - "WS_KEY", - "WebSocketReader", - "WebSocketWriter", - "ws_ext_gen", - "ws_ext_parse", - "WSMessage", - "WebSocketError", - "WSMsgType", - "WSCloseCode", -) - - -SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( - sys.version_info, __version__ -) # type: str - -RESPONSES = ( - http.server.BaseHTTPRequestHandler.responses -) # type: Mapping[int, Tuple[str, str]] diff --git a/third_party/python/aiohttp/aiohttp/http_exceptions.py b/third_party/python/aiohttp/aiohttp/http_exceptions.py deleted file mode 100644 index c885f80f3220..000000000000 --- a/third_party/python/aiohttp/aiohttp/http_exceptions.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Low-level http related exceptions.""" - - -from typing import Optional, Union - -from .typedefs import _CIMultiDict - -__all__ = ("HttpProcessingError",) - - -class HttpProcessingError(Exception): - """HTTP error. - - Shortcut for raising HTTP errors with custom code, message and headers. - - code: HTTP Error code. - message: (optional) Error message. 
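HttpProcessingError, whose definition continues below, uses class-level code/message defaults that subclasses override and that individual instances may refine. A standalone sketch of the same pattern with a hypothetical BadRequest subclass:

class ProcessingError(Exception):
    """Minimal stand-in for the class-attribute pattern used by HttpProcessingError."""
    code = 0
    message = ""

    def __init__(self, *, code=None, message="", headers=None):
        if code is not None:
            self.code = code          # per-instance override of the class default
        self.message = message
        self.headers = headers

    def __str__(self):
        return f"{self.code}, message={self.message!r}"

class BadRequest(ProcessingError):    # subclass pins the status code, like HttpBadRequest
    code = 400
    message = "Bad Request"

try:
    raise BadRequest(message="missing Host header")
except ProcessingError as exc:
    print(exc)        # 400, message='missing Host header'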
- headers: (optional) Headers to be sent in response, a list of pairs - """ - - code = 0 - message = "" - headers = None - - def __init__( - self, - *, - code: Optional[int] = None, - message: str = "", - headers: Optional[_CIMultiDict] = None, - ) -> None: - if code is not None: - self.code = code - self.headers = headers - self.message = message - - def __str__(self) -> str: - return f"{self.code}, message={self.message!r}" - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}: {self}>" - - -class BadHttpMessage(HttpProcessingError): - - code = 400 - message = "Bad Request" - - def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: - super().__init__(message=message, headers=headers) - self.args = (message,) - - -class HttpBadRequest(BadHttpMessage): - - code = 400 - message = "Bad Request" - - -class PayloadEncodingError(BadHttpMessage): - """Base class for payload errors""" - - -class ContentEncodingError(PayloadEncodingError): - """Content encoding error.""" - - -class TransferEncodingError(PayloadEncodingError): - """transfer encoding error.""" - - -class ContentLengthError(PayloadEncodingError): - """Not enough data for satisfy content length header.""" - - -class LineTooLong(BadHttpMessage): - def __init__( - self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" - ) -> None: - super().__init__( - f"Got more than {limit} bytes ({actual_size}) when reading {line}." - ) - self.args = (line, limit, actual_size) - - -class InvalidHeader(BadHttpMessage): - def __init__(self, hdr: Union[bytes, str]) -> None: - if isinstance(hdr, bytes): - hdr = hdr.decode("utf-8", "surrogateescape") - super().__init__(f"Invalid HTTP Header: {hdr}") - self.hdr = hdr - self.args = (hdr,) - - -class BadStatusLine(BadHttpMessage): - def __init__(self, line: str = "") -> None: - if not isinstance(line, str): - line = repr(line) - super().__init__(f"Bad status line {line!r}") - self.args = (line,) - self.line = line - - -class InvalidURLError(BadHttpMessage): - pass diff --git a/third_party/python/aiohttp/aiohttp/http_parser.py b/third_party/python/aiohttp/aiohttp/http_parser.py deleted file mode 100644 index 71ba815ae67b..000000000000 --- a/third_party/python/aiohttp/aiohttp/http_parser.py +++ /dev/null @@ -1,901 +0,0 @@ -import abc -import asyncio -import collections -import re -import string -import zlib -from enum import IntEnum -from typing import Any, List, Optional, Tuple, Type, Union - -from multidict import CIMultiDict, CIMultiDictProxy, istr -from yarl import URL - -from . import hdrs -from .base_protocol import BaseProtocol -from .helpers import NO_EXTENSIONS, BaseTimerContext -from .http_exceptions import ( - BadStatusLine, - ContentEncodingError, - ContentLengthError, - InvalidHeader, - LineTooLong, - TransferEncodingError, -) -from .http_writer import HttpVersion, HttpVersion10 -from .log import internal_logger -from .streams import EMPTY_PAYLOAD, StreamReader -from .typedefs import RawHeaders - -try: - import brotli - - HAS_BROTLI = True -except ImportError: # pragma: no cover - HAS_BROTLI = False - - -__all__ = ( - "HeadersParser", - "HttpParser", - "HttpRequestParser", - "HttpResponseParser", - "RawRequestMessage", - "RawResponseMessage", -) - -ASCIISET = set(string.printable) - -# See https://tools.ietf.org/html/rfc7230#section-3.1.1 -# and https://tools.ietf.org/html/rfc7230#appendix-B -# -# method = token -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." 
/ -# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA -# token = 1*tchar -METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") -VERSRE = re.compile(r"HTTP/(\d+).(\d+)") -HDRRE = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]") - -RawRequestMessage = collections.namedtuple( - "RawRequestMessage", - [ - "method", - "path", - "version", - "headers", - "raw_headers", - "should_close", - "compression", - "upgrade", - "chunked", - "url", - ], -) - -RawResponseMessage = collections.namedtuple( - "RawResponseMessage", - [ - "version", - "code", - "reason", - "headers", - "raw_headers", - "should_close", - "compression", - "upgrade", - "chunked", - ], -) - - -class ParseState(IntEnum): - - PARSE_NONE = 0 - PARSE_LENGTH = 1 - PARSE_CHUNKED = 2 - PARSE_UNTIL_EOF = 3 - - -class ChunkState(IntEnum): - PARSE_CHUNKED_SIZE = 0 - PARSE_CHUNKED_CHUNK = 1 - PARSE_CHUNKED_CHUNK_EOF = 2 - PARSE_MAYBE_TRAILERS = 3 - PARSE_TRAILERS = 4 - - -class HeadersParser: - def __init__( - self, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - ) -> None: - self.max_line_size = max_line_size - self.max_headers = max_headers - self.max_field_size = max_field_size - - def parse_headers( - self, lines: List[bytes] - ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: - headers = CIMultiDict() # type: CIMultiDict[str] - raw_headers = [] - - lines_idx = 1 - line = lines[1] - line_count = len(lines) - - while line: - # Parse initial header name : value pair. - try: - bname, bvalue = line.split(b":", 1) - except ValueError: - raise InvalidHeader(line) from None - - bname = bname.strip(b" \t") - bvalue = bvalue.lstrip() - if HDRRE.search(bname): - raise InvalidHeader(bname) - if len(bname) > self.max_field_size: - raise LineTooLong( - "request header name {}".format( - bname.decode("utf8", "xmlcharrefreplace") - ), - str(self.max_field_size), - str(len(bname)), - ) - - header_length = len(bvalue) - - # next line - lines_idx += 1 - line = lines[lines_idx] - - # consume continuation lines - continuation = line and line[0] in (32, 9) # (' ', '\t') - - if continuation: - bvalue_lst = [bvalue] - while continuation: - header_length += len(line) - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "xmlcharrefreplace") - ), - str(self.max_field_size), - str(header_length), - ) - bvalue_lst.append(line) - - # next line - lines_idx += 1 - if lines_idx < line_count: - line = lines[lines_idx] - if line: - continuation = line[0] in (32, 9) # (' ', '\t') - else: - line = b"" - break - bvalue = b"".join(bvalue_lst) - else: - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "xmlcharrefreplace") - ), - str(self.max_field_size), - str(header_length), - ) - - bvalue = bvalue.strip() - name = bname.decode("utf-8", "surrogateescape") - value = bvalue.decode("utf-8", "surrogateescape") - - headers.add(name, value) - raw_headers.append((bname, bvalue)) - - return (CIMultiDictProxy(headers), tuple(raw_headers)) - - -class HttpParser(abc.ABC): - def __init__( - self, - protocol: Optional[BaseProtocol] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - limit: int = 2 ** 16, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - timer: Optional[BaseTimerContext] = None, - code: Optional[int] = None, - method: Optional[str] = None, - readall: bool = False, - payload_exception: Optional[Type[BaseException]] = None, - response_with_body: 
bool = True, - read_until_eof: bool = False, - auto_decompress: bool = True, - ) -> None: - self.protocol = protocol - self.loop = loop - self.max_line_size = max_line_size - self.max_headers = max_headers - self.max_field_size = max_field_size - self.timer = timer - self.code = code - self.method = method - self.readall = readall - self.payload_exception = payload_exception - self.response_with_body = response_with_body - self.read_until_eof = read_until_eof - - self._lines = [] # type: List[bytes] - self._tail = b"" - self._upgraded = False - self._payload = None - self._payload_parser = None # type: Optional[HttpPayloadParser] - self._auto_decompress = auto_decompress - self._limit = limit - self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) - - @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> Any: - pass - - def feed_eof(self) -> Any: - if self._payload_parser is not None: - self._payload_parser.feed_eof() - self._payload_parser = None - else: - # try to extract partial message - if self._tail: - self._lines.append(self._tail) - - if self._lines: - if self._lines[-1] != "\r\n": - self._lines.append(b"") - try: - return self.parse_message(self._lines) - except Exception: - return None - - def feed_data( - self, - data: bytes, - SEP: bytes = b"\r\n", - EMPTY: bytes = b"", - CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, - METH_CONNECT: str = hdrs.METH_CONNECT, - SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, - ) -> Tuple[List[Any], bool, bytes]: - - messages = [] - - if self._tail: - data, self._tail = self._tail + data, b"" - - data_len = len(data) - start_pos = 0 - loop = self.loop - - while start_pos < data_len: - - # read HTTP message (request/response line + headers), \r\n\r\n - # and split by lines - if self._payload_parser is None and not self._upgraded: - pos = data.find(SEP, start_pos) - # consume \r\n - if pos == start_pos and not self._lines: - start_pos = pos + 2 - continue - - if pos >= start_pos: - # line found - self._lines.append(data[start_pos:pos]) - start_pos = pos + 2 - - # \r\n\r\n found - if self._lines[-1] == EMPTY: - try: - msg = self.parse_message(self._lines) - finally: - self._lines.clear() - - # payload length - length = msg.headers.get(CONTENT_LENGTH) - if length is not None: - try: - length = int(length) - except ValueError: - raise InvalidHeader(CONTENT_LENGTH) - if length < 0: - raise InvalidHeader(CONTENT_LENGTH) - - # do not support old websocket spec - if SEC_WEBSOCKET_KEY1 in msg.headers: - raise InvalidHeader(SEC_WEBSOCKET_KEY1) - - self._upgraded = msg.upgrade - - method = getattr(msg, "method", self.method) - - assert self.protocol is not None - # calculate payload - if ( - (length is not None and length > 0) - or msg.chunked - and not msg.upgrade - ): - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - payload_parser = HttpPayloadParser( - payload, - length=length, - chunked=msg.chunked, - method=method, - compression=msg.compression, - code=self.code, - readall=self.readall, - response_with_body=self.response_with_body, - auto_decompress=self._auto_decompress, - ) - if not payload_parser.done: - self._payload_parser = payload_parser - elif method == METH_CONNECT: - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - self._upgraded = True - self._payload_parser = HttpPayloadParser( - payload, - method=msg.method, - compression=msg.compression, - readall=True, - auto_decompress=self._auto_decompress, 
- ) - else: - if ( - getattr(msg, "code", 100) >= 199 - and length is None - and self.read_until_eof - ): - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - payload_parser = HttpPayloadParser( - payload, - length=length, - chunked=msg.chunked, - method=method, - compression=msg.compression, - code=self.code, - readall=True, - response_with_body=self.response_with_body, - auto_decompress=self._auto_decompress, - ) - if not payload_parser.done: - self._payload_parser = payload_parser - else: - payload = EMPTY_PAYLOAD # type: ignore - - messages.append((msg, payload)) - else: - self._tail = data[start_pos:] - data = EMPTY - break - - # no parser, just store - elif self._payload_parser is None and self._upgraded: - assert not self._lines - break - - # feed payload - elif data and start_pos < data_len: - assert not self._lines - assert self._payload_parser is not None - try: - eof, data = self._payload_parser.feed_data(data[start_pos:]) - except BaseException as exc: - if self.payload_exception is not None: - self._payload_parser.payload.set_exception( - self.payload_exception(str(exc)) - ) - else: - self._payload_parser.payload.set_exception(exc) - - eof = True - data = b"" - - if eof: - start_pos = 0 - data_len = len(data) - self._payload_parser = None - continue - else: - break - - if data and start_pos < data_len: - data = data[start_pos:] - else: - data = EMPTY - - return messages, self._upgraded, data - - def parse_headers( - self, lines: List[bytes] - ) -> Tuple[ - "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool - ]: - """Parses RFC 5322 headers from a stream. - - Line continuations are supported. Returns list of header name - and value pairs. Header name is in upper case. - """ - headers, raw_headers = self._headers_parser.parse_headers(lines) - close_conn = None - encoding = None - upgrade = False - chunked = False - - # keep-alive - conn = headers.get(hdrs.CONNECTION) - if conn: - v = conn.lower() - if v == "close": - close_conn = True - elif v == "keep-alive": - close_conn = False - elif v == "upgrade": - upgrade = True - - # encoding - enc = headers.get(hdrs.CONTENT_ENCODING) - if enc: - enc = enc.lower() - if enc in ("gzip", "deflate", "br"): - encoding = enc - - # chunking - te = headers.get(hdrs.TRANSFER_ENCODING) - if te and "chunked" in te.lower(): - chunked = True - - return (headers, raw_headers, close_conn, encoding, upgrade, chunked) - - def set_upgraded(self, val: bool) -> None: - """Set connection upgraded (to websocket) mode. - :param bool val: new state. - """ - self._upgraded = val - - -class HttpRequestParser(HttpParser): - """Read request status line. Exception .http_exceptions.BadStatusLine - could be raised in case of any errors in status line. - Returns RawRequestMessage. 
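HttpRequestParser.parse_message, shown below, splits the request line with str.split(None, 2) and validates the pieces against the METHRE and VERSRE patterns defined earlier in this module. A standalone sketch of that request-line handling (the request line itself is made up; a fragment would not normally appear on the wire, but the parser tolerates one):

import re

# Same patterns as METHRE / VERSRE defined earlier in http_parser.py
METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE = re.compile(r"HTTP/(\d+).(\d+)")

line = "GET /index.html?q=1#frag HTTP/1.1"
method, path, version = line.split(None, 2)

assert METHRE.match(method), "invalid token in method"
match = VERSRE.match(version)
assert match, "bad HTTP version"
major, minor = int(match.group(1)), int(match.group(2))

path_part, _, fragment = path.partition("#")
path_part, _, query = path_part.partition("?")
print(method, path_part, query, fragment, (major, minor))
# GET /index.html q=1 frag (1, 1)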
- """ - - def parse_message(self, lines: List[bytes]) -> Any: - # request line - line = lines[0].decode("utf-8", "surrogateescape") - try: - method, path, version = line.split(None, 2) - except ValueError: - raise BadStatusLine(line) from None - - if len(path) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(path)) - ) - - path_part, _hash_separator, url_fragment = path.partition("#") - path_part, _question_mark_separator, qs_part = path_part.partition("?") - - # method - if not METHRE.match(method): - raise BadStatusLine(method) - - # version - try: - if version.startswith("HTTP/"): - n1, n2 = version[5:].split(".", 1) - version_o = HttpVersion(int(n1), int(n2)) - else: - raise BadStatusLine(version) - except Exception: - raise BadStatusLine(version) - - # read headers - ( - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) = self.parse_headers(lines) - - if close is None: # then the headers weren't set in the request - if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close - close = True - else: # HTTP 1.1 must ask to close. - close = False - - return RawRequestMessage( - method, - path, - version_o, - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based - # NOTE: parser does, otherwise it results into the same - # NOTE: HTTP Request-Line input producing different - # NOTE: `yarl.URL()` objects - URL.build( - path=path_part, - query_string=qs_part, - fragment=url_fragment, - encoded=True, - ), - ) - - -class HttpResponseParser(HttpParser): - """Read response status line and headers. - - BadStatusLine could be raised in case of any errors in status line. - Returns RawResponseMessage""" - - def parse_message(self, lines: List[bytes]) -> Any: - line = lines[0].decode("utf-8", "surrogateescape") - try: - version, status = line.split(None, 1) - except ValueError: - raise BadStatusLine(line) from None - - try: - status, reason = status.split(None, 1) - except ValueError: - reason = "" - - if len(reason) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(reason)) - ) - - # version - match = VERSRE.match(version) - if match is None: - raise BadStatusLine(line) - version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - - # The status code is a three-digit number - try: - status_i = int(status) - except ValueError: - raise BadStatusLine(line) from None - - if status_i > 999: - raise BadStatusLine(line) - - # read headers - ( - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) = self.parse_headers(lines) - - if close is None: - close = version_o <= HttpVersion10 - - return RawResponseMessage( - version_o, - status_i, - reason.strip(), - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) - - -class HttpPayloadParser: - def __init__( - self, - payload: StreamReader, - length: Optional[int] = None, - chunked: bool = False, - compression: Optional[str] = None, - code: Optional[int] = None, - method: Optional[str] = None, - readall: bool = False, - response_with_body: bool = True, - auto_decompress: bool = True, - ) -> None: - self._length = 0 - self._type = ParseState.PARSE_NONE - self._chunk = ChunkState.PARSE_CHUNKED_SIZE - self._chunk_size = 0 - self._chunk_tail = b"" - self._auto_decompress = auto_decompress - self.done = False - - # payload decompression wrapper - if response_with_body and compression and 
self._auto_decompress: - real_payload = DeflateBuffer( - payload, compression - ) # type: Union[StreamReader, DeflateBuffer] - else: - real_payload = payload - - # payload parser - if not response_with_body: - # don't parse payload if it's not expected to be received - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True - - elif chunked: - self._type = ParseState.PARSE_CHUNKED - elif length is not None: - self._type = ParseState.PARSE_LENGTH - self._length = length - if self._length == 0: - real_payload.feed_eof() - self.done = True - else: - if readall and code != 204: - self._type = ParseState.PARSE_UNTIL_EOF - elif method in ("PUT", "POST"): - internal_logger.warning( # pragma: no cover - "Content-Length or Transfer-Encoding header is required" - ) - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True - - self.payload = real_payload - - def feed_eof(self) -> None: - if self._type == ParseState.PARSE_UNTIL_EOF: - self.payload.feed_eof() - elif self._type == ParseState.PARSE_LENGTH: - raise ContentLengthError( - "Not enough data for satisfy content length header." - ) - elif self._type == ParseState.PARSE_CHUNKED: - raise TransferEncodingError( - "Not enough data for satisfy transfer length header." - ) - - def feed_data( - self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";" - ) -> Tuple[bool, bytes]: - # Read specified amount of bytes - if self._type == ParseState.PARSE_LENGTH: - required = self._length - chunk_len = len(chunk) - - if required >= chunk_len: - self._length = required - chunk_len - self.payload.feed_data(chunk, chunk_len) - if self._length == 0: - self.payload.feed_eof() - return True, b"" - else: - self._length = 0 - self.payload.feed_data(chunk[:required], required) - self.payload.feed_eof() - return True, chunk[required:] - - # Chunked transfer encoding parser - elif self._type == ParseState.PARSE_CHUNKED: - if self._chunk_tail: - chunk = self._chunk_tail + chunk - self._chunk_tail = b"" - - while chunk: - - # read next chunk size - if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: - pos = chunk.find(SEP) - if pos >= 0: - i = chunk.find(CHUNK_EXT, 0, pos) - if i >= 0: - size_b = chunk[:i] # strip chunk-extensions - else: - size_b = chunk[:pos] - - try: - size = int(bytes(size_b), 16) - except ValueError: - exc = TransferEncodingError( - chunk[:pos].decode("ascii", "surrogateescape") - ) - self.payload.set_exception(exc) - raise exc from None - - chunk = chunk[pos + 2 :] - if size == 0: # eof marker - self._chunk = ChunkState.PARSE_MAYBE_TRAILERS - else: - self._chunk = ChunkState.PARSE_CHUNKED_CHUNK - self._chunk_size = size - self.payload.begin_http_chunk_receiving() - else: - self._chunk_tail = chunk - return False, b"" - - # read chunk and feed buffer - if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: - required = self._chunk_size - chunk_len = len(chunk) - - if required > chunk_len: - self._chunk_size = required - chunk_len - self.payload.feed_data(chunk, chunk_len) - return False, b"" - else: - self._chunk_size = 0 - self.payload.feed_data(chunk[:required], required) - chunk = chunk[required:] - self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF - self.payload.end_http_chunk_receiving() - - # toss the CRLF at the end of the chunk - if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: - if chunk[:2] == SEP: - chunk = chunk[2:] - self._chunk = ChunkState.PARSE_CHUNKED_SIZE - else: - self._chunk_tail = chunk - return False, b"" - - # if stream does not contain trailer, after 0\r\n - # we should get 
another \r\n otherwise - # trailers needs to be skiped until \r\n\r\n - if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: - head = chunk[:2] - if head == SEP: - # end of stream - self.payload.feed_eof() - return True, chunk[2:] - # Both CR and LF, or only LF may not be received yet. It is - # expected that CRLF or LF will be shown at the very first - # byte next time, otherwise trailers should come. The last - # CRLF which marks the end of response might not be - # contained in the same TCP segment which delivered the - # size indicator. - if not head: - return False, b"" - if head == SEP[:1]: - self._chunk_tail = head - return False, b"" - self._chunk = ChunkState.PARSE_TRAILERS - - # read and discard trailer up to the CRLF terminator - if self._chunk == ChunkState.PARSE_TRAILERS: - pos = chunk.find(SEP) - if pos >= 0: - chunk = chunk[pos + 2 :] - self._chunk = ChunkState.PARSE_MAYBE_TRAILERS - else: - self._chunk_tail = chunk - return False, b"" - - # Read all bytes until eof - elif self._type == ParseState.PARSE_UNTIL_EOF: - self.payload.feed_data(chunk, len(chunk)) - - return False, b"" - - -class DeflateBuffer: - """DeflateStream decompress stream and feed data into specified stream.""" - - def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: - self.out = out - self.size = 0 - self.encoding = encoding - self._started_decoding = False - - if encoding == "br": - if not HAS_BROTLI: # pragma: no cover - raise ContentEncodingError( - "Can not decode content-encoding: brotli (br). " - "Please install `brotlipy`" - ) - self.decompressor = brotli.Decompressor() - else: - zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS - self.decompressor = zlib.decompressobj(wbits=zlib_mode) - - def set_exception(self, exc: BaseException) -> None: - self.out.set_exception(exc) - - def feed_data(self, chunk: bytes, size: int) -> None: - if not size: - return - - self.size += size - - # RFC1950 - # bits 0..3 = CM = 0b1000 = 8 = "deflate" - # bits 4..7 = CINFO = 1..7 = windows size. - if ( - not self._started_decoding - and self.encoding == "deflate" - and chunk[0] & 0xF != 8 - ): - # Change the decoder to decompress incorrectly compressed data - # Actually we should issue a warning about non-RFC-compliant data. 
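DeflateBuffer selects the zlib window parameter from the Content-Encoding (16 + MAX_WBITS for gzip, MAX_WBITS for a proper zlib-wrapped deflate stream) and, per the comment above, falls back to -MAX_WBITS for servers that send raw deflate without the RFC 1950 header. A stdlib sketch of the three modes:

import zlib

data = b"hello world" * 20

# gzip container: wbits = 16 + MAX_WBITS on both sides
gz = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
blob = gz.compress(data) + gz.flush()
assert zlib.decompressobj(wbits=16 + zlib.MAX_WBITS).decompress(blob) == data

# "deflate" as specified: zlib-wrapped stream (first byte & 0xF == 8, per RFC 1950)
blob = zlib.compress(data)
assert blob[0] & 0xF == 8
assert zlib.decompressobj(wbits=zlib.MAX_WBITS).decompress(blob) == data

# raw deflate, as some non-compliant servers send it: negative wbits skips the header
raw = zlib.compressobj(wbits=-zlib.MAX_WBITS)
blob = raw.compress(data) + raw.flush()
assert zlib.decompressobj(wbits=-zlib.MAX_WBITS).decompress(blob) == data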
- self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS) - - try: - chunk = self.decompressor.decompress(chunk) - except Exception: - raise ContentEncodingError( - "Can not decode content-encoding: %s" % self.encoding - ) - - self._started_decoding = True - - if chunk: - self.out.feed_data(chunk, len(chunk)) - - def feed_eof(self) -> None: - chunk = self.decompressor.flush() - - if chunk or self.size > 0: - self.out.feed_data(chunk, len(chunk)) - if self.encoding == "deflate" and not self.decompressor.eof: - raise ContentEncodingError("deflate") - - self.out.feed_eof() - - def begin_http_chunk_receiving(self) -> None: - self.out.begin_http_chunk_receiving() - - def end_http_chunk_receiving(self) -> None: - self.out.end_http_chunk_receiving() - - -HttpRequestParserPy = HttpRequestParser -HttpResponseParserPy = HttpResponseParser -RawRequestMessagePy = RawRequestMessage -RawResponseMessagePy = RawResponseMessage - -try: - if not NO_EXTENSIONS: - from ._http_parser import ( # type: ignore - HttpRequestParser, - HttpResponseParser, - RawRequestMessage, - RawResponseMessage, - ) - - HttpRequestParserC = HttpRequestParser - HttpResponseParserC = HttpResponseParser - RawRequestMessageC = RawRequestMessage - RawResponseMessageC = RawResponseMessage -except ImportError: # pragma: no cover - pass diff --git a/third_party/python/aiohttp/aiohttp/http_websocket.py b/third_party/python/aiohttp/aiohttp/http_websocket.py deleted file mode 100644 index 5cdaeea43c03..000000000000 --- a/third_party/python/aiohttp/aiohttp/http_websocket.py +++ /dev/null @@ -1,698 +0,0 @@ -"""WebSocket protocol versions 13 and 8.""" - -import asyncio -import collections -import json -import random -import re -import sys -import zlib -from enum import IntEnum -from struct import Struct -from typing import Any, Callable, List, Optional, Tuple, Union - -from .base_protocol import BaseProtocol -from .helpers import NO_EXTENSIONS -from .streams import DataQueue - -__all__ = ( - "WS_CLOSED_MESSAGE", - "WS_CLOSING_MESSAGE", - "WS_KEY", - "WebSocketReader", - "WebSocketWriter", - "WSMessage", - "WebSocketError", - "WSMsgType", - "WSCloseCode", -) - - -class WSCloseCode(IntEnum): - OK = 1000 - GOING_AWAY = 1001 - PROTOCOL_ERROR = 1002 - UNSUPPORTED_DATA = 1003 - INVALID_TEXT = 1007 - POLICY_VIOLATION = 1008 - MESSAGE_TOO_BIG = 1009 - MANDATORY_EXTENSION = 1010 - INTERNAL_ERROR = 1011 - SERVICE_RESTART = 1012 - TRY_AGAIN_LATER = 1013 - - -ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode} - - -class WSMsgType(IntEnum): - # websocket spec types - CONTINUATION = 0x0 - TEXT = 0x1 - BINARY = 0x2 - PING = 0x9 - PONG = 0xA - CLOSE = 0x8 - - # aiohttp specific types - CLOSING = 0x100 - CLOSED = 0x101 - ERROR = 0x102 - - text = TEXT - binary = BINARY - ping = PING - pong = PONG - close = CLOSE - closing = CLOSING - closed = CLOSED - error = ERROR - - -WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - -UNPACK_LEN2 = Struct("!H").unpack_from -UNPACK_LEN3 = Struct("!Q").unpack_from -UNPACK_CLOSE_CODE = Struct("!H").unpack -PACK_LEN1 = Struct("!BB").pack -PACK_LEN2 = Struct("!BBH").pack -PACK_LEN3 = Struct("!BBQ").pack -PACK_CLOSE_CODE = Struct("!H").pack -MSG_SIZE = 2 ** 14 -DEFAULT_LIMIT = 2 ** 16 - - -_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"]) - - -class WSMessage(_WSMessageBase): - def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: - """Return parsed JSON data. - - .. 
versionadded:: 0.22 - """ - return loads(self.data) - - -WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) -WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) - - -class WebSocketError(Exception): - """WebSocket protocol parser error.""" - - def __init__(self, code: int, message: str) -> None: - self.code = code - super().__init__(code, message) - - def __str__(self) -> str: - return self.args[1] - - -class WSHandshakeError(Exception): - """WebSocket protocol handshake error.""" - - -native_byteorder = sys.byteorder - - -# Used by _websocket_mask_python -_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)] - - -def _websocket_mask_python(mask: bytes, data: bytearray) -> None: - """Websocket masking function. - - `mask` is a `bytes` object of length 4; `data` is a `bytearray` - object of any length. The contents of `data` are masked with `mask`, - as specified in section 5.3 of RFC 6455. - - Note that this function mutates the `data` argument. - - This pure-python implementation may be replaced by an optimized - version when available. - - """ - assert isinstance(data, bytearray), data - assert len(mask) == 4, mask - - if data: - a, b, c, d = (_XOR_TABLE[n] for n in mask) - data[::4] = data[::4].translate(a) - data[1::4] = data[1::4].translate(b) - data[2::4] = data[2::4].translate(c) - data[3::4] = data[3::4].translate(d) - - -if NO_EXTENSIONS: # pragma: no cover - _websocket_mask = _websocket_mask_python -else: - try: - from ._websocket import _websocket_mask_cython # type: ignore - - _websocket_mask = _websocket_mask_cython - except ImportError: # pragma: no cover - _websocket_mask = _websocket_mask_python - -_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF]) - - -_WS_EXT_RE = re.compile( - r"^(?:;\s*(?:" - r"(server_no_context_takeover)|" - r"(client_no_context_takeover)|" - r"(server_max_window_bits(?:=(\d+))?)|" - r"(client_max_window_bits(?:=(\d+))?)))*$" -) - -_WS_EXT_RE_SPLIT = re.compile(r"permessage-deflate([^,]+)?") - - -def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: - if not extstr: - return 0, False - - compress = 0 - notakeover = False - for ext in _WS_EXT_RE_SPLIT.finditer(extstr): - defext = ext.group(1) - # Return compress = 15 when get `permessage-deflate` - if not defext: - compress = 15 - break - match = _WS_EXT_RE.match(defext) - if match: - compress = 15 - if isserver: - # Server never fail to detect compress handshake. 
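The translation-table masking above is an optimization of the basic RFC 6455 §5.3 operation. As a plain illustration (not the vendored routine), masking is a per-byte XOR against the 4-byte mask, so applying the same mask twice restores the original payload:

    def ws_mask(mask: bytes, data: bytearray) -> None:
        # XOR each payload byte with mask[i % 4], in place.
        for i in range(len(data)):
            data[i] ^= mask[i % 4]

    payload = bytearray(b"hello websocket")
    ws_mask(b"\x01\x02\x03\x04", payload)   # mask
    ws_mask(b"\x01\x02\x03\x04", payload)   # unmask
    assert payload == bytearray(b"hello websocket")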
- # Server does not need to send max wbit to client - if match.group(4): - compress = int(match.group(4)) - # Group3 must match if group4 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # CONTINUE to next extension - if compress > 15 or compress < 9: - compress = 0 - continue - if match.group(1): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - else: - if match.group(6): - compress = int(match.group(6)) - # Group5 must match if group6 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # FAIL the parse progress - if compress > 15 or compress < 9: - raise WSHandshakeError("Invalid window size") - if match.group(2): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - # Return Fail if client side and not match - elif not isserver: - raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) - - return compress, notakeover - - -def ws_ext_gen( - compress: int = 15, isserver: bool = False, server_notakeover: bool = False -) -> str: - # client_notakeover=False not used for server - # compress wbit 8 does not support in zlib - if compress < 9 or compress > 15: - raise ValueError( - "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" - ) - enabledext = ["permessage-deflate"] - if not isserver: - enabledext.append("client_max_window_bits") - - if compress < 15: - enabledext.append("server_max_window_bits=" + str(compress)) - if server_notakeover: - enabledext.append("server_no_context_takeover") - # if client_notakeover: - # enabledext.append('client_no_context_takeover') - return "; ".join(enabledext) - - -class WSParserState(IntEnum): - READ_HEADER = 1 - READ_PAYLOAD_LENGTH = 2 - READ_PAYLOAD_MASK = 3 - READ_PAYLOAD = 4 - - -class WebSocketReader: - def __init__( - self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True - ) -> None: - self.queue = queue - self._max_msg_size = max_msg_size - - self._exc = None # type: Optional[BaseException] - self._partial = bytearray() - self._state = WSParserState.READ_HEADER - - self._opcode = None # type: Optional[int] - self._frame_fin = False - self._frame_opcode = None # type: Optional[int] - self._frame_payload = bytearray() - - self._tail = b"" - self._has_mask = False - self._frame_mask = None # type: Optional[bytes] - self._payload_length = 0 - self._payload_length_flag = 0 - self._compressed = None # type: Optional[bool] - self._decompressobj = None # type: Any # zlib.decompressobj actually - self._compress = compress - - def feed_eof(self) -> None: - self.queue.feed_eof() - - def feed_data(self, data: bytes) -> Tuple[bool, bytes]: - if self._exc: - return True, data - - try: - return self._feed_data(data) - except Exception as exc: - self._exc = exc - self.queue.set_exception(exc) - return True, b"" - - def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: - for fin, opcode, payload, compressed in self.parse_frame(data): - if compressed and not self._decompressobj: - self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS) - if opcode == WSMsgType.CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid 
UTF-8 text message" - ) from exc - msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) - elif payload: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", - ) - else: - msg = WSMessage(WSMsgType.CLOSE, 0, "") - - self.queue.feed_data(msg, 0) - - elif opcode == WSMsgType.PING: - self.queue.feed_data( - WSMessage(WSMsgType.PING, payload, ""), len(payload) - ) - - elif opcode == WSMsgType.PONG: - self.queue.feed_data( - WSMessage(WSMsgType.PONG, payload, ""), len(payload) - ) - - elif ( - opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) - and self._opcode is None - ): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" - ) - else: - # load text/binary - if not fin: - # got partial frame payload - if opcode != WSMsgType.CONTINUATION: - self._opcode = opcode - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - else: - # previous frame was non finished - # we should get continuation opcode - if self._partial: - if opcode != WSMsgType.CONTINUATION: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if opcode == WSMsgType.CONTINUATION: - assert self._opcode is not None - opcode = self._opcode - self._opcode = None - - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. 
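The CLOSE handling above expects the payload layout defined by RFC 6455: an optional 2-byte big-endian status code followed by a UTF-8 reason string. A minimal stand-alone decoder (hypothetical helper, shown only for orientation):

    import struct

    def parse_close_payload(payload: bytes):
        # An empty payload is allowed and maps to code 0 with no reason.
        if len(payload) >= 2:
            (code,) = struct.unpack("!H", payload[:2])
            return code, payload[2:].decode("utf-8")
        return 0, ""

    assert parse_close_payload(struct.pack("!H", 1000) + b"bye") == (1000, "bye")
    assert parse_close_payload(b"") == (0, "")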
- if compressed: - self._partial.extend(_WS_DEFLATE_TRAILING) - payload_merged = self._decompressobj.decompress( - self._partial, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(self._partial) - - self._partial.clear() - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - self.queue.feed_data( - WSMessage(WSMsgType.TEXT, text, ""), len(text) - ) - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - else: - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), - len(payload_merged), - ) - - return False, b"" - - def parse_frame( - self, buf: bytes - ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: - """Return the next frame from the socket.""" - frames = [] - if self._tail: - buf, self._tail = self._tail + buf, b"" - - start_pos = 0 - buf_length = len(buf) - - while True: - # read header - if self._state == WSParserState.READ_HEADER: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) - - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F - - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", - ) - - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - else: - break - - # read payload length - if self._state == WSParserState.READ_PAYLOAD_LENGTH: - length = self._payload_length_flag - if length == 126: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: - break - elif length > 126: - if buf_length - start_pos >= 8: - data = 
buf[start_pos : start_pos + 8] - start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: - break - else: - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - - # read payload mask - if self._state == WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos >= 4: - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - else: - break - - if self._state == WSParserState.READ_PAYLOAD: - length = self._payload_length - payload = self._frame_payload - - chunk_len = buf_length - start_pos - if length >= chunk_len: - self._payload_length = length - chunk_len - payload.extend(buf[start_pos:]) - start_pos = buf_length - else: - self._payload_length = 0 - payload.extend(buf[start_pos : start_pos + length]) - start_pos = start_pos + length - - if self._payload_length == 0: - if self._has_mask: - assert self._frame_mask is not None - _websocket_mask(self._frame_mask, payload) - - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) - - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - else: - break - - self._tail = buf[start_pos:] - - return frames - - -class WebSocketWriter: - def __init__( - self, - protocol: BaseProtocol, - transport: asyncio.Transport, - *, - use_mask: bool = False, - limit: int = DEFAULT_LIMIT, - random: Any = random.Random(), - compress: int = 0, - notakeover: bool = False, - ) -> None: - self.protocol = protocol - self.transport = transport - self.use_mask = use_mask - self.randrange = random.randrange - self.compress = compress - self.notakeover = notakeover - self._closing = False - self._limit = limit - self._output_size = 0 - self._compressobj = None # type: Any # actually compressobj - - async def _send_frame( - self, message: bytes, opcode: int, compress: Optional[int] = None - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if self._closing and not (opcode & WSMsgType.CLOSE): - raise ConnectionResetError("Cannot write to closing transport") - - rsv = 0 - - # Only compress larger packets (disabled) - # Does small packet needs to be compressed? 
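The decompression step above re-appends the 0x00 0x00 0xFF 0xFF flush marker that a permessage-deflate sender strips from the end of each message; the writer side, which continues below, performs the matching strip after a sync flush. A hedged round-trip sketch using only the standard zlib module:

    import zlib

    _TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])

    def deflate_message(data: bytes) -> bytes:
        # Raw deflate (negative wbits), sync flush, then drop the trailer.
        c = zlib.compressobj(wbits=-zlib.MAX_WBITS)
        out = c.compress(data) + c.flush(zlib.Z_SYNC_FLUSH)
        return out[:-4] if out.endswith(_TRAILING) else out

    def inflate_message(data: bytes) -> bytes:
        # The receiver puts the trailer back before inflating.
        d = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
        return d.decompress(data + _TRAILING)

    assert inflate_message(deflate_message(b"hello hello hello")) == b"hello hello hello"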
- # if self.compress and opcode < 8 and len(message) > 124: - if (compress or self.compress) and opcode < 8: - if compress: - # Do not set self._compress if compressing is for this frame - compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress) - else: # self.compress - if not self._compressobj: - self._compressobj = zlib.compressobj( - level=zlib.Z_BEST_SPEED, wbits=-self.compress - ) - compressobj = self._compressobj - - message = compressobj.compress(message) - message = message + compressobj.flush( - zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH - ) - if message.endswith(_WS_DEFLATE_TRAILING): - message = message[:-4] - rsv = rsv | 0x40 - - msg_length = len(message) - - use_mask = self.use_mask - if use_mask: - mask_bit = 0x80 - else: - mask_bit = 0 - - if msg_length < 126: - header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) - elif msg_length < (1 << 16): - header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) - else: - header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) - if use_mask: - mask = self.randrange(0, 0xFFFFFFFF) - mask = mask.to_bytes(4, "big") - message = bytearray(message) - _websocket_mask(mask, message) - self._write(header + mask + message) - self._output_size += len(header) + len(mask) + len(message) - else: - if len(message) > MSG_SIZE: - self._write(header) - self._write(message) - else: - self._write(header + message) - - self._output_size += len(header) + len(message) - - if self._output_size > self._limit: - self._output_size = 0 - await self.protocol._drain_helper() - - def _write(self, data: bytes) -> None: - if self.transport is None or self.transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") - self.transport.write(data) - - async def pong(self, message: bytes = b"") -> None: - """Send pong message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self._send_frame(message, WSMsgType.PONG) - - async def ping(self, message: bytes = b"") -> None: - """Send ping message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self._send_frame(message, WSMsgType.PING) - - async def send( - self, - message: Union[str, bytes], - binary: bool = False, - compress: Optional[int] = None, - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if isinstance(message, str): - message = message.encode("utf-8") - if binary: - await self._send_frame(message, WSMsgType.BINARY, compress) - else: - await self._send_frame(message, WSMsgType.TEXT, compress) - - async def close(self, code: int = 1000, message: bytes = b"") -> None: - """Close the websocket, sending the specified code and message.""" - if isinstance(message, str): - message = message.encode("utf-8") - try: - await self._send_frame( - PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE - ) - finally: - self._closing = True diff --git a/third_party/python/aiohttp/aiohttp/http_writer.py b/third_party/python/aiohttp/aiohttp/http_writer.py deleted file mode 100644 index d261fc4e8d10..000000000000 --- a/third_party/python/aiohttp/aiohttp/http_writer.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Http related parsers and protocol.""" - -import asyncio -import collections -import zlib -from typing import Any, Awaitable, Callable, Optional, Union # noqa - -from multidict import CIMultiDict - -from .abc import AbstractStreamWriter -from .base_protocol import BaseProtocol -from .helpers import NO_EXTENSIONS - -__all__ = ("StreamWriter", 
"HttpVersion", "HttpVersion10", "HttpVersion11") - -HttpVersion = collections.namedtuple("HttpVersion", ["major", "minor"]) -HttpVersion10 = HttpVersion(1, 0) -HttpVersion11 = HttpVersion(1, 1) - - -_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] - - -class StreamWriter(AbstractStreamWriter): - def __init__( - self, - protocol: BaseProtocol, - loop: asyncio.AbstractEventLoop, - on_chunk_sent: _T_OnChunkSent = None, - ) -> None: - self._protocol = protocol - self._transport = protocol.transport - - self.loop = loop - self.length = None - self.chunked = False - self.buffer_size = 0 - self.output_size = 0 - - self._eof = False - self._compress = None # type: Any - self._drain_waiter = None - - self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent - - @property - def transport(self) -> Optional[asyncio.Transport]: - return self._transport - - @property - def protocol(self) -> BaseProtocol: - return self._protocol - - def enable_chunking(self) -> None: - self.chunked = True - - def enable_compression(self, encoding: str = "deflate") -> None: - zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS - self._compress = zlib.compressobj(wbits=zlib_mode) - - def _write(self, chunk: bytes) -> None: - size = len(chunk) - self.buffer_size += size - self.output_size += size - - if self._transport is None or self._transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") - self._transport.write(chunk) - - async def write( - self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 - ) -> None: - """Writes chunk of data to a stream. - - write_eof() indicates end of stream. - writer can't be used after write_eof() method being called. - write() return drain future. - """ - if self._on_chunk_sent is not None: - await self._on_chunk_sent(chunk) - - if isinstance(chunk, memoryview): - if chunk.nbytes != len(chunk): - # just reshape it - chunk = chunk.cast("c") - - if self._compress is not None: - chunk = self._compress.compress(chunk) - if not chunk: - return - - if self.length is not None: - chunk_len = len(chunk) - if self.length >= chunk_len: - self.length = self.length - chunk_len - else: - chunk = chunk[: self.length] - self.length = 0 - if not chunk: - return - - if chunk: - if self.chunked: - chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len_pre + chunk + b"\r\n" - - self._write(chunk) - - if self.buffer_size > LIMIT and drain: - self.buffer_size = 0 - await self.drain() - - async def write_headers( - self, status_line: str, headers: "CIMultiDict[str]" - ) -> None: - """Write request/response status and headers.""" - # status + headers - buf = _serialize_headers(status_line, headers) - self._write(buf) - - async def write_eof(self, chunk: bytes = b"") -> None: - if self._eof: - return - - if chunk and self._on_chunk_sent is not None: - await self._on_chunk_sent(chunk) - - if self._compress: - if chunk: - chunk = self._compress.compress(chunk) - - chunk = chunk + self._compress.flush() - if chunk and self.chunked: - chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" - else: - if self.chunked: - if chunk: - chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" - else: - chunk = b"0\r\n\r\n" - - if chunk: - self._write(chunk) - - await self.drain() - - self._eof = True - self._transport = None - - async def drain(self) -> None: - """Flush the write buffer. 
- - The intended use is to write - - await w.write(data) - await w.drain() - """ - if self._protocol.transport is not None: - await self._protocol._drain_helper() - - -def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: - line = ( - status_line - + "\r\n" - + "".join([k + ": " + v + "\r\n" for k, v in headers.items()]) - ) - return line.encode("utf-8") + b"\r\n" - - -_serialize_headers = _py_serialize_headers - -try: - import aiohttp._http_writer as _http_writer # type: ignore - - _c_serialize_headers = _http_writer._serialize_headers - if not NO_EXTENSIONS: - _serialize_headers = _c_serialize_headers -except ImportError: - pass diff --git a/third_party/python/aiohttp/aiohttp/locks.py b/third_party/python/aiohttp/aiohttp/locks.py deleted file mode 100644 index ce5b9c6f7319..000000000000 --- a/third_party/python/aiohttp/aiohttp/locks.py +++ /dev/null @@ -1,45 +0,0 @@ -import asyncio -import collections -from typing import Any, Optional - -try: - from typing import Deque -except ImportError: - from typing_extensions import Deque - - -class EventResultOrError: - """ - This class wrappers the Event asyncio lock allowing either awake the - locked Tasks without any error or raising an exception. - - thanks to @vorpalsmith for the simple design. - """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._exc = None # type: Optional[BaseException] - self._event = asyncio.Event() - self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]] - - def set(self, exc: Optional[BaseException] = None) -> None: - self._exc = exc - self._event.set() - - async def wait(self) -> Any: - waiter = self._loop.create_task(self._event.wait()) - self._waiters.append(waiter) - try: - val = await waiter - finally: - self._waiters.remove(waiter) - - if self._exc is not None: - raise self._exc - - return val - - def cancel(self) -> None: - """ Cancel all waiters """ - for waiter in self._waiters: - waiter.cancel() diff --git a/third_party/python/aiohttp/aiohttp/log.py b/third_party/python/aiohttp/aiohttp/log.py deleted file mode 100644 index 3cecea2bac18..000000000000 --- a/third_party/python/aiohttp/aiohttp/log.py +++ /dev/null @@ -1,8 +0,0 @@ -import logging - -access_logger = logging.getLogger("aiohttp.access") -client_logger = logging.getLogger("aiohttp.client") -internal_logger = logging.getLogger("aiohttp.internal") -server_logger = logging.getLogger("aiohttp.server") -web_logger = logging.getLogger("aiohttp.web") -ws_logger = logging.getLogger("aiohttp.websocket") diff --git a/third_party/python/aiohttp/aiohttp/multipart.py b/third_party/python/aiohttp/aiohttp/multipart.py deleted file mode 100644 index 9e1ca92d23e8..000000000000 --- a/third_party/python/aiohttp/aiohttp/multipart.py +++ /dev/null @@ -1,957 +0,0 @@ -import base64 -import binascii -import json -import re -import uuid -import warnings -import zlib -from collections import deque -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Dict, - Iterator, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, -) -from urllib.parse import parse_qsl, unquote, urlencode - -from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping - -from .hdrs import ( - CONTENT_DISPOSITION, - CONTENT_ENCODING, - CONTENT_LENGTH, - CONTENT_TRANSFER_ENCODING, - CONTENT_TYPE, -) -from .helpers import CHAR, TOKEN, parse_mimetype, reify -from .http import HeadersParser -from .payload import ( - JsonPayload, - LookupError, - Order, - 
Payload, - StringPayload, - get_payload, - payload_type, -) -from .streams import StreamReader - -__all__ = ( - "MultipartReader", - "MultipartWriter", - "BodyPartReader", - "BadContentDispositionHeader", - "BadContentDispositionParam", - "parse_content_disposition", - "content_disposition_filename", -) - - -if TYPE_CHECKING: # pragma: no cover - from .client_reqrep import ClientResponse - - -class BadContentDispositionHeader(RuntimeWarning): - pass - - -class BadContentDispositionParam(RuntimeWarning): - pass - - -def parse_content_disposition( - header: Optional[str], -) -> Tuple[Optional[str], Dict[str, str]]: - def is_token(string: str) -> bool: - return bool(string) and TOKEN >= set(string) - - def is_quoted(string: str) -> bool: - return string[0] == string[-1] == '"' - - def is_rfc5987(string: str) -> bool: - return is_token(string) and string.count("'") == 2 - - def is_extended_param(string: str) -> bool: - return string.endswith("*") - - def is_continuous_param(string: str) -> bool: - pos = string.find("*") + 1 - if not pos: - return False - substring = string[pos:-1] if string.endswith("*") else string[pos:] - return substring.isdigit() - - def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: - return re.sub(f"\\\\([{chars}])", "\\1", text) - - if not header: - return None, {} - - disptype, *parts = header.split(";") - if not is_token(disptype): - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - params = {} # type: Dict[str, str] - while parts: - item = parts.pop(0) - - if "=" not in item: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - key, value = item.split("=", 1) - key = key.lower().strip() - value = value.lstrip() - - if key in params: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - if not is_token(key): - warnings.warn(BadContentDispositionParam(item)) - continue - - elif is_continuous_param(key): - if is_quoted(value): - value = unescape(value[1:-1]) - elif not is_token(value): - warnings.warn(BadContentDispositionParam(item)) - continue - - elif is_extended_param(key): - if is_rfc5987(value): - encoding, _, value = value.split("'", 2) - encoding = encoding or "utf-8" - else: - warnings.warn(BadContentDispositionParam(item)) - continue - - try: - value = unquote(value, encoding, "strict") - except UnicodeDecodeError: # pragma: nocover - warnings.warn(BadContentDispositionParam(item)) - continue - - else: - failed = True - if is_quoted(value): - failed = False - value = unescape(value[1:-1].lstrip("\\/")) - elif is_token(value): - failed = False - elif parts: - # maybe just ; in filename, in any case this is just - # one case fix, for proper fix we need to redesign parser - _value = "{};{}".format(value, parts[0]) - if is_quoted(_value): - parts.pop(0) - value = unescape(_value[1:-1].lstrip("\\/")) - failed = False - - if failed: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - params[key] = value - - return disptype.lower(), params - - -def content_disposition_filename( - params: Mapping[str, str], name: str = "filename" -) -> Optional[str]: - name_suf = "%s*" % name - if not params: - return None - elif name_suf in params: - return params[name_suf] - elif name in params: - return params[name] - else: - parts = [] - fnparams = sorted( - (key, value) for key, value in params.items() if key.startswith(name_suf) - ) - for num, (key, value) in enumerate(fnparams): - _, tail = key.split("*", 1) - if tail.endswith("*"): - tail = tail[:-1] - if tail 
== str(num): - parts.append(value) - else: - break - if not parts: - return None - value = "".join(parts) - if "'" in value: - encoding, _, value = value.split("'", 2) - encoding = encoding or "utf-8" - return unquote(value, encoding, "strict") - return value - - -class MultipartResponseWrapper: - """Wrapper around the MultipartReader. - - It takes care about - underlying connection and close it when it needs in. - """ - - def __init__( - self, - resp: "ClientResponse", - stream: "MultipartReader", - ) -> None: - self.resp = resp - self.stream = stream - - def __aiter__(self) -> "MultipartResponseWrapper": - return self - - async def __anext__( - self, - ) -> Union["MultipartReader", "BodyPartReader"]: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - def at_eof(self) -> bool: - """Returns True when all response data had been read.""" - return self.resp.content.at_eof() - - async def next( - self, - ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: - """Emits next multipart reader object.""" - item = await self.stream.next() - if self.stream.at_eof(): - await self.release() - return item - - async def release(self) -> None: - """Releases the connection gracefully, reading all the content - to the void.""" - await self.resp.release() - - -class BodyPartReader: - """Multipart reader for single body part.""" - - chunk_size = 8192 - - def __init__( - self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader - ) -> None: - self.headers = headers - self._boundary = boundary - self._content = content - self._at_eof = False - length = self.headers.get(CONTENT_LENGTH, None) - self._length = int(length) if length is not None else None - self._read_bytes = 0 - # TODO: typeing.Deque is not supported by Python 3.5 - self._unread = deque() # type: Any - self._prev_chunk = None # type: Optional[bytes] - self._content_eof = 0 - self._cache = {} # type: Dict[str, Any] - - def __aiter__(self) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore - - async def __anext__(self) -> bytes: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - async def next(self) -> Optional[bytes]: - item = await self.read() - if not item: - return None - return item - - async def read(self, *, decode: bool = False) -> bytes: - """Reads body part data. - - decode: Decodes data following by encoding - method from Content-Encoding header. If it missed - data remains untouched - """ - if self._at_eof: - return b"" - data = bytearray() - while not self._at_eof: - data.extend(await self.read_chunk(self.chunk_size)) - if decode: - return self.decode(data) - return data - - async def read_chunk(self, size: int = chunk_size) -> bytes: - """Reads body part content chunk of the specified size. - - size: chunk size - """ - if self._at_eof: - return b"" - if self._length: - chunk = await self._read_chunk_from_length(size) - else: - chunk = await self._read_chunk_from_stream(size) - - self._read_bytes += len(chunk) - if self._read_bytes == self._length: - self._at_eof = True - if self._at_eof: - clrf = await self._content.readline() - assert ( - b"\r\n" == clrf - ), "reader did not read all the data or it is malformed" - return chunk - - async def _read_chunk_from_length(self, size: int) -> bytes: - # Reads body part content chunk of the specified size. - # The body part must has Content-Length header with proper value. 
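When chunking is enabled, the removed StreamWriter.write() and write_eof() above frame the body using the chunked transfer coding (RFC 7230 §4.1): each chunk is prefixed with its length in hexadecimal plus CRLF, and the stream ends with a zero-length chunk. A minimal sketch (illustrative helpers, not the removed writer):

    def frame_chunk(chunk: bytes) -> bytes:
        return ("%x\r\n" % len(chunk)).encode("ascii") + chunk + b"\r\n"

    END_OF_CHUNKS = b"0\r\n\r\n"

    assert frame_chunk(b"hello") == b"5\r\nhello\r\n"
    assert frame_chunk(b"x" * 26) == b"1a\r\n" + b"x" * 26 + b"\r\n"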
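The extended-parameter branch of parse_content_disposition() above follows RFC 5987: a `filename*` value carries a charset, a language tag, and a percent-encoded payload separated by single quotes. A stand-alone sketch of that decoding step (hypothetical helper):

    from urllib.parse import unquote

    def decode_extended_param(value: str) -> str:
        # "UTF-8''na%C3%AFve.txt" -> charset, language, encoded value
        encoding, _language, encoded = value.split("'", 2)
        return unquote(encoded, encoding or "utf-8", "strict")

    assert decode_extended_param("UTF-8''na%C3%AFve.txt") == "na\u00efve.txt"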
- assert self._length is not None, "Content-Length required for chunked read" - chunk_size = min(size, self._length - self._read_bytes) - chunk = await self._content.read(chunk_size) - return chunk - - async def _read_chunk_from_stream(self, size: int) -> bytes: - # Reads content chunk of body part with unknown length. - # The Content-Length header for body part is not necessary. - assert ( - size >= len(self._boundary) + 2 - ), "Chunk size must be greater or equal than boundary length + 2" - first_chunk = self._prev_chunk is None - if first_chunk: - self._prev_chunk = await self._content.read(size) - - chunk = await self._content.read(size) - self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" - assert self._prev_chunk is not None - window = self._prev_chunk + chunk - sub = b"\r\n" + self._boundary - if first_chunk: - idx = window.find(sub) - else: - idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) - if idx >= 0: - # pushing boundary back to content - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - self._content.unread_data(window[idx:]) - if size > idx: - self._prev_chunk = self._prev_chunk[:idx] - chunk = window[len(self._prev_chunk) : idx] - if not chunk: - self._at_eof = True - result = self._prev_chunk - self._prev_chunk = chunk - return result - - async def readline(self) -> bytes: - """Reads body part by line by line.""" - if self._at_eof: - return b"" - - if self._unread: - line = self._unread.popleft() - else: - line = await self._content.readline() - - if line.startswith(self._boundary): - # the very last boundary may not come with \r\n, - # so set single rules for everyone - sline = line.rstrip(b"\r\n") - boundary = self._boundary - last_boundary = self._boundary + b"--" - # ensure that we read exactly the boundary, not something alike - if sline == boundary or sline == last_boundary: - self._at_eof = True - self._unread.append(line) - return b"" - else: - next_line = await self._content.readline() - if next_line.startswith(self._boundary): - line = line[:-2] # strip CRLF but only once - self._unread.append(next_line) - - return line - - async def release(self) -> None: - """Like read(), but reads all the data to the void.""" - if self._at_eof: - return - while not self._at_eof: - await self.read_chunk(self.chunk_size) - - async def text(self, *, encoding: Optional[str] = None) -> str: - """Like read(), but assumes that body part contains text data.""" - data = await self.read(decode=True) - # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA - # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA - encoding = encoding or self.get_charset(default="utf-8") - return data.decode(encoding) - - async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: - """Like read(), but assumes that body parts contains JSON data.""" - data = await self.read(decode=True) - if not data: - return None - encoding = encoding or self.get_charset(default="utf-8") - return json.loads(data.decode(encoding)) - - async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: - """Like read(), but assumes that body parts contains form - urlencoded data. 
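The stream-based chunk reader above joins the previous read with the current one before searching for the boundary, so a delimiter split across two reads is still detected. The core of that sliding-window search, as a self-contained sketch:

    def find_boundary(prev_chunk: bytes, chunk: bytes, boundary: bytes) -> int:
        # Search the joined window, starting just before the seam so a
        # boundary straddling the two reads is found exactly once.
        window = prev_chunk + chunk
        needle = b"\r\n" + boundary
        return window.find(needle, max(0, len(prev_chunk) - len(needle)))

    assert find_boundary(b"part data\r\n--bou", b"ndary--\r\n", b"--boundary") == 9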
- """ - data = await self.read(decode=True) - if not data: - return [] - if encoding is not None: - real_encoding = encoding - else: - real_encoding = self.get_charset(default="utf-8") - return parse_qsl( - data.rstrip().decode(real_encoding), - keep_blank_values=True, - encoding=real_encoding, - ) - - def at_eof(self) -> bool: - """Returns True if the boundary was reached or False otherwise.""" - return self._at_eof - - def decode(self, data: bytes) -> bytes: - """Decodes data according the specified Content-Encoding - or Content-Transfer-Encoding headers value. - """ - if CONTENT_TRANSFER_ENCODING in self.headers: - data = self._decode_content_transfer(data) - if CONTENT_ENCODING in self.headers: - return self._decode_content(data) - return data - - def _decode_content(self, data: bytes) -> bytes: - encoding = self.headers.get(CONTENT_ENCODING, "").lower() - - if encoding == "deflate": - return zlib.decompress(data, -zlib.MAX_WBITS) - elif encoding == "gzip": - return zlib.decompress(data, 16 + zlib.MAX_WBITS) - elif encoding == "identity": - return data - else: - raise RuntimeError(f"unknown content encoding: {encoding}") - - def _decode_content_transfer(self, data: bytes) -> bytes: - encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() - - if encoding == "base64": - return base64.b64decode(data) - elif encoding == "quoted-printable": - return binascii.a2b_qp(data) - elif encoding in ("binary", "8bit", "7bit"): - return data - else: - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(encoding) - ) - - def get_charset(self, default: str) -> str: - """Returns charset parameter from Content-Type header or default.""" - ctype = self.headers.get(CONTENT_TYPE, "") - mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", default) - - @reify - def name(self) -> Optional[str]: - """Returns name specified in Content-Disposition header or None - if missed or header is malformed. - """ - - _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) - return content_disposition_filename(params, "name") - - @reify - def filename(self) -> Optional[str]: - """Returns filename specified in Content-Disposition header or None - if missed or header is malformed. - """ - _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) - return content_disposition_filename(params, "filename") - - -@payload_type(BodyPartReader, order=Order.try_first) -class BodyPartReaderPayload(Payload): - def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: - super().__init__(value, *args, **kwargs) - - params = {} # type: Dict[str, str] - if value.name is not None: - params["name"] = value.name - if value.filename is not None: - params["filename"] = value.filename - - if params: - self.set_content_disposition("attachment", True, **params) - - async def write(self, writer: Any) -> None: - field = self._value - chunk = await field.read_chunk(size=2 ** 16) - while chunk: - await writer.write(field.decode(chunk)) - chunk = await field.read_chunk(size=2 ** 16) - - -class MultipartReader: - """Multipart body reader.""" - - #: Response wrapper, used when multipart readers constructs from response. - response_wrapper_cls = MultipartResponseWrapper - #: Multipart reader class, used to handle multipart/* body parts. - #: None points to type(self) - multipart_reader_cls = None - #: Body part reader class for non multipart/* content types. 
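The content-transfer decoding above only has to support the encodings allowed in multipart bodies; base64 and quoted-printable map directly onto the standard library, while binary/7bit/8bit are pass-through. A self-contained sketch of that mapping:

    import base64
    import binascii

    def decode_transfer(data: bytes, encoding: str) -> bytes:
        encoding = encoding.lower()
        if encoding == "base64":
            return base64.b64decode(data)
        if encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        if encoding in ("binary", "8bit", "7bit"):
            return data
        raise RuntimeError("unknown content transfer encoding: %s" % encoding)

    assert decode_transfer(b"aGVsbG8=", "base64") == b"hello"
    assert decode_transfer(b"na=C3=AFve", "quoted-printable") == b"na\xc3\xafve"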
- part_reader_cls = BodyPartReader - - def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: - self.headers = headers - self._boundary = ("--" + self._get_boundary()).encode() - self._content = content - self._last_part = ( - None - ) # type: Optional[Union['MultipartReader', BodyPartReader]] - self._at_eof = False - self._at_bof = True - self._unread = [] # type: List[bytes] - - def __aiter__( - self, - ) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore - - async def __anext__( - self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - @classmethod - def from_response( - cls, - response: "ClientResponse", - ) -> MultipartResponseWrapper: - """Constructs reader instance from HTTP response. - - :param response: :class:`~aiohttp.client.ClientResponse` instance - """ - obj = cls.response_wrapper_cls( - response, cls(response.headers, response.content) - ) - return obj - - def at_eof(self) -> bool: - """Returns True if the final boundary was reached or - False otherwise. - """ - return self._at_eof - - async def next( - self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: - """Emits the next multipart body part.""" - # So, if we're at BOF, we need to skip till the boundary. - if self._at_eof: - return None - await self._maybe_release_last_part() - if self._at_bof: - await self._read_until_first_boundary() - self._at_bof = False - else: - await self._read_boundary() - if self._at_eof: # we just read the last boundary, nothing to do there - return None - self._last_part = await self.fetch_next_part() - return self._last_part - - async def release(self) -> None: - """Reads all the body parts to the void till the final boundary.""" - while not self._at_eof: - item = await self.next() - if item is None: - break - await item.release() - - async def fetch_next_part( - self, - ) -> Union["MultipartReader", BodyPartReader]: - """Returns the next body part reader.""" - headers = await self._read_headers() - return self._get_part_reader(headers) - - def _get_part_reader( - self, - headers: "CIMultiDictProxy[str]", - ) -> Union["MultipartReader", BodyPartReader]: - """Dispatches the response by the `Content-Type` header, returning - suitable reader instance. 
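The part dispatch shown above keys off the main type of each part's Content-Type header: nested multipart/* parts get another multipart reader, everything else gets a body-part reader. The decision itself is simple, sketched here with a plain string check in place of aiohttp's parse_mimetype():

    def pick_reader(content_type: str) -> str:
        main_type = content_type.split("/", 1)[0].strip().lower()
        return "multipart-reader" if main_type == "multipart" else "body-part-reader"

    assert pick_reader("multipart/mixed; boundary=xyz") == "multipart-reader"
    assert pick_reader("application/json") == "body-part-reader"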
- - :param dict headers: Response headers - """ - ctype = headers.get(CONTENT_TYPE, "") - mimetype = parse_mimetype(ctype) - - if mimetype.type == "multipart": - if self.multipart_reader_cls is None: - return type(self)(headers, self._content) - return self.multipart_reader_cls(headers, self._content) - else: - return self.part_reader_cls(self._boundary, headers, self._content) - - def _get_boundary(self) -> str: - mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) - - assert mimetype.type == "multipart", "multipart/* content type expected" - - if "boundary" not in mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] - ) - - boundary = mimetype.parameters["boundary"] - if len(boundary) > 70: - raise ValueError("boundary %r is too long (70 chars max)" % boundary) - - return boundary - - async def _readline(self) -> bytes: - if self._unread: - return self._unread.pop() - return await self._content.readline() - - async def _read_until_first_boundary(self) -> None: - while True: - chunk = await self._readline() - if chunk == b"": - raise ValueError( - "Could not find starting boundary %r" % (self._boundary) - ) - chunk = chunk.rstrip() - if chunk == self._boundary: - return - elif chunk == self._boundary + b"--": - self._at_eof = True - return - - async def _read_boundary(self) -> None: - chunk = (await self._readline()).rstrip() - if chunk == self._boundary: - pass - elif chunk == self._boundary + b"--": - self._at_eof = True - epilogue = await self._readline() - next_line = await self._readline() - - # the epilogue is expected and then either the end of input or the - # parent multipart boundary, if the parent boundary is found then - # it should be marked as unread and handed to the parent for - # processing - if next_line[:2] == b"--": - self._unread.append(next_line) - # otherwise the request is likely missing an epilogue and both - # lines should be passed to the parent for processing - # (this handles the old behavior gracefully) - else: - self._unread.extend([next_line, epilogue]) - else: - raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") - - async def _read_headers(self) -> "CIMultiDictProxy[str]": - lines = [b""] - while True: - chunk = await self._content.readline() - chunk = chunk.strip() - lines.append(chunk) - if not chunk: - break - parser = HeadersParser() - headers, raw_headers = parser.parse_headers(lines) - return headers - - async def _maybe_release_last_part(self) -> None: - """Ensures that the last read body part is read completely.""" - if self._last_part is not None: - if not self._last_part.at_eof(): - await self._last_part.release() - self._unread.extend(self._last_part._unread) - self._last_part = None - - -_Part = Tuple[Payload, str, str] - - -class MultipartWriter(Payload): - """Multipart body writer.""" - - def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: - boundary = boundary if boundary is not None else uuid.uuid4().hex - # The underlying Payload API demands a str (utf-8), not bytes, - # so we need to ensure we don't lose anything during conversion. - # As a result, require the boundary to be ASCII only. - # In both situations. 
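The boundary lookup above pulls the boundary parameter out of the top-level Content-Type header and enforces the 70-character limit from RFC 2046. An equivalent stand-alone check using email.message from the standard library as a stand-in for aiohttp's parse_mimetype():

    from email.message import Message

    def get_boundary(content_type: str) -> str:
        msg = Message()
        msg["Content-Type"] = content_type
        boundary = msg.get_param("boundary")
        if boundary is None:
            raise ValueError("boundary missed for Content-Type: %s" % content_type)
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)
        return boundary

    assert get_boundary('multipart/form-data; boundary="abc123"') == "abc123"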
- - try: - self._boundary = boundary.encode("ascii") - except UnicodeEncodeError: - raise ValueError("boundary should contain ASCII only chars") from None - ctype = f"multipart/{subtype}; boundary={self._boundary_value}" - - super().__init__(None, content_type=ctype) - - self._parts = [] # type: List[_Part] - - def __enter__(self) -> "MultipartWriter": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - pass - - def __iter__(self) -> Iterator[_Part]: - return iter(self._parts) - - def __len__(self) -> int: - return len(self._parts) - - def __bool__(self) -> bool: - return True - - _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z") - _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]") - - @property - def _boundary_value(self) -> str: - """Wrap boundary parameter value in quotes, if necessary. - - Reads self.boundary and returns a unicode sting. - """ - # Refer to RFCs 7231, 7230, 5234. - # - # parameter = token "=" ( token / quoted-string ) - # token = 1*tchar - # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE - # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text - # obs-text = %x80-FF - # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) - # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" - # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" - # / DIGIT / ALPHA - # ; any VCHAR, except delimiters - # VCHAR = %x21-7E - value = self._boundary - if re.match(self._valid_tchar_regex, value): - return value.decode("ascii") # cannot fail - - if re.search(self._invalid_qdtext_char_regex, value): - raise ValueError("boundary value contains invalid characters") - - # escape %x5C and %x22 - quoted_value_content = value.replace(b"\\", b"\\\\") - quoted_value_content = quoted_value_content.replace(b'"', b'\\"') - - return '"' + quoted_value_content.decode("ascii") + '"' - - @property - def boundary(self) -> str: - return self._boundary.decode("ascii") - - def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: - if headers is None: - headers = CIMultiDict() - - if isinstance(obj, Payload): - obj.headers.update(headers) - return self.append_payload(obj) - else: - try: - payload = get_payload(obj, headers=headers) - except LookupError: - raise TypeError("Cannot create payload from %r" % obj) - else: - return self.append_payload(payload) - - def append_payload(self, payload: Payload) -> Payload: - """Adds a new body part to multipart writer.""" - # compression - encoding = payload.headers.get( - CONTENT_ENCODING, - "", - ).lower() # type: Optional[str] - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding = payload.headers.get( - CONTENT_TRANSFER_ENCODING, - "", - ).lower() # type: Optional[str] - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(te_encoding) - ) - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) - - self._parts.append((payload, encoding, te_encoding)) # type: ignore - return payload - - def append_json( - self, obj: Any, headers: Optional[MultiMapping[str]] = None - ) -> Payload: - """Helper to append JSON part.""" - if headers 
is None: - headers = CIMultiDict() - - return self.append_payload(JsonPayload(obj, headers=headers)) - - def append_form( - self, - obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[MultiMapping[str]] = None, - ) -> Payload: - """Helper to append form urlencoded part.""" - assert isinstance(obj, (Sequence, Mapping)) - - if headers is None: - headers = CIMultiDict() - - if isinstance(obj, Mapping): - obj = list(obj.items()) - data = urlencode(obj, doseq=True) - - return self.append_payload( - StringPayload( - data, headers=headers, content_type="application/x-www-form-urlencoded" - ) - ) - - @property - def size(self) -> Optional[int]: - """Size of the payload.""" - total = 0 - for part, encoding, te_encoding in self._parts: - if encoding or te_encoding or part.size is None: - return None - - total += int( - 2 - + len(self._boundary) - + 2 - + part.size # b'--'+self._boundary+b'\r\n' - + len(part._binary_headers) - + 2 # b'\r\n' - ) - - total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' - return total - - async def write(self, writer: Any, close_boundary: bool = True) -> None: - """Write body.""" - for part, encoding, te_encoding in self._parts: - await writer.write(b"--" + self._boundary + b"\r\n") - await writer.write(part._binary_headers) - - if encoding or te_encoding: - w = MultipartPayloadWriter(writer) - if encoding: - w.enable_compression(encoding) - if te_encoding: - w.enable_encoding(te_encoding) - await part.write(w) # type: ignore - await w.write_eof() - else: - await part.write(writer) - - await writer.write(b"\r\n") - - if close_boundary: - await writer.write(b"--" + self._boundary + b"--\r\n") - - -class MultipartPayloadWriter: - def __init__(self, writer: Any) -> None: - self._writer = writer - self._encoding = None # type: Optional[str] - self._compress = None # type: Any - self._encoding_buffer = None # type: Optional[bytearray] - - def enable_encoding(self, encoding: str) -> None: - if encoding == "base64": - self._encoding = encoding - self._encoding_buffer = bytearray() - elif encoding == "quoted-printable": - self._encoding = "quoted-printable" - - def enable_compression(self, encoding: str = "deflate") -> None: - zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS - self._compress = zlib.compressobj(wbits=zlib_mode) - - async def write_eof(self) -> None: - if self._compress is not None: - chunk = self._compress.flush() - if chunk: - self._compress = None - await self.write(chunk) - - if self._encoding == "base64": - if self._encoding_buffer: - await self._writer.write(base64.b64encode(self._encoding_buffer)) - - async def write(self, chunk: bytes) -> None: - if self._compress is not None: - if chunk: - chunk = self._compress.compress(chunk) - if not chunk: - return - - if self._encoding == "base64": - buf = self._encoding_buffer - assert buf is not None - buf.extend(chunk) - - if buf: - div, mod = divmod(len(buf), 3) - enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) - if enc_chunk: - b64chunk = base64.b64encode(enc_chunk) - await self._writer.write(b64chunk) - elif self._encoding == "quoted-printable": - await self._writer.write(binascii.b2a_qp(chunk)) - else: - await self._writer.write(chunk) diff --git a/third_party/python/aiohttp/aiohttp/payload.py b/third_party/python/aiohttp/aiohttp/payload.py deleted file mode 100644 index c63dd2204c01..000000000000 --- a/third_party/python/aiohttp/aiohttp/payload.py +++ /dev/null @@ -1,448 +0,0 @@ -import asyncio -import enum -import io 
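The streaming base64 writer above never splits a 3-byte quantum across chunk boundaries: complete groups are encoded immediately and the remainder is buffered for the next call (or flushed at EOF). A compact sketch of that bookkeeping, using hypothetical helper names:

    import base64

    def encode_step(buf: bytearray, chunk: bytes):
        # Encode only whole 3-byte groups; keep the remainder buffered.
        buf.extend(chunk)
        whole = (len(buf) // 3) * 3
        ready, rest = bytes(buf[:whole]), bytearray(buf[whole:])
        return base64.b64encode(ready), rest

    buf = bytearray()
    out1, buf = encode_step(buf, b"hell")
    out2, buf = encode_step(buf, b"o!")
    flushed = base64.b64encode(bytes(buf))  # what write_eof() would emit
    assert out1 + out2 + flushed == base64.b64encode(b"hello!")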
-import json -import mimetypes -import os -import warnings -from abc import ABC, abstractmethod -from itertools import chain -from typing import ( - IO, - TYPE_CHECKING, - Any, - ByteString, - Dict, - Iterable, - Optional, - Text, - TextIO, - Tuple, - Type, - Union, -) - -from multidict import CIMultiDict - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import ( - PY_36, - content_disposition_header, - guess_filename, - parse_mimetype, - sentinel, -) -from .streams import StreamReader -from .typedefs import JSONEncoder, _CIMultiDict - -__all__ = ( - "PAYLOAD_REGISTRY", - "get_payload", - "payload_type", - "Payload", - "BytesPayload", - "StringPayload", - "IOBasePayload", - "BytesIOPayload", - "BufferedReaderPayload", - "TextIOPayload", - "StringIOPayload", - "JsonPayload", - "AsyncIterablePayload", -) - -TOO_LARGE_BYTES_BODY = 2 ** 20 # 1 MB - - -if TYPE_CHECKING: # pragma: no cover - from typing import List - - -class LookupError(Exception): - pass - - -class Order(str, enum.Enum): - normal = "normal" - try_first = "try_first" - try_last = "try_last" - - -def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": - return PAYLOAD_REGISTRY.get(data, *args, **kwargs) - - -def register_payload( - factory: Type["Payload"], type: Any, *, order: Order = Order.normal -) -> None: - PAYLOAD_REGISTRY.register(factory, type, order=order) - - -class payload_type: - def __init__(self, type: Any, *, order: Order = Order.normal) -> None: - self.type = type - self.order = order - - def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: - register_payload(factory, self.type, order=self.order) - return factory - - -class PayloadRegistry: - """Payload registry. - - note: we need zope.interface for more efficient adapter search - """ - - def __init__(self) -> None: - self._first = [] # type: List[Tuple[Type[Payload], Any]] - self._normal = [] # type: List[Tuple[Type[Payload], Any]] - self._last = [] # type: List[Tuple[Type[Payload], Any]] - - def get( - self, data: Any, *args: Any, _CHAIN: Any = chain, **kwargs: Any - ) -> "Payload": - if isinstance(data, Payload): - return data - for factory, type in _CHAIN(self._first, self._normal, self._last): - if isinstance(data, type): - return factory(data, *args, **kwargs) - - raise LookupError() - - def register( - self, factory: Type["Payload"], type: Any, *, order: Order = Order.normal - ) -> None: - if order is Order.try_first: - self._first.append((factory, type)) - elif order is Order.normal: - self._normal.append((factory, type)) - elif order is Order.try_last: - self._last.append((factory, type)) - else: - raise ValueError(f"Unsupported order {order!r}") - - -class Payload(ABC): - - _default_content_type = "application/octet-stream" # type: str - _size = None # type: Optional[int] - - def __init__( - self, - value: Any, - headers: Optional[ - Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] - ] = None, - content_type: Optional[str] = sentinel, - filename: Optional[str] = None, - encoding: Optional[str] = None, - **kwargs: Any, - ) -> None: - self._encoding = encoding - self._filename = filename - self._headers = CIMultiDict() # type: _CIMultiDict - self._value = value - if content_type is not sentinel and content_type is not None: - self._headers[hdrs.CONTENT_TYPE] = content_type - elif self._filename is not None: - content_type = mimetypes.guess_type(self._filename)[0] - if content_type is None: - content_type = self._default_content_type - self._headers[hdrs.CONTENT_TYPE] = content_type - else: - 
self._headers[hdrs.CONTENT_TYPE] = self._default_content_type - self._headers.update(headers or {}) - - @property - def size(self) -> Optional[int]: - """Size of the payload.""" - return self._size - - @property - def filename(self) -> Optional[str]: - """Filename of the payload.""" - return self._filename - - @property - def headers(self) -> _CIMultiDict: - """Custom item headers""" - return self._headers - - @property - def _binary_headers(self) -> bytes: - return ( - "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( - "utf-8" - ) - + b"\r\n" - ) - - @property - def encoding(self) -> Optional[str]: - """Payload encoding""" - return self._encoding - - @property - def content_type(self) -> str: - """Content type""" - return self._headers[hdrs.CONTENT_TYPE] - - def set_content_disposition( - self, disptype: str, quote_fields: bool = True, **params: Any - ) -> None: - """Sets ``Content-Disposition`` header.""" - self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( - disptype, quote_fields=quote_fields, **params - ) - - @abstractmethod - async def write(self, writer: AbstractStreamWriter) -> None: - """Write payload. - - writer is an AbstractStreamWriter instance: - """ - - -class BytesPayload(Payload): - def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: - if not isinstance(value, (bytes, bytearray, memoryview)): - raise TypeError( - "value argument must be byte-ish, not {!r}".format(type(value)) - ) - - if "content_type" not in kwargs: - kwargs["content_type"] = "application/octet-stream" - - super().__init__(value, *args, **kwargs) - - if isinstance(value, memoryview): - self._size = value.nbytes - else: - self._size = len(value) - - if self._size > TOO_LARGE_BYTES_BODY: - if PY_36: - kwargs = {"source": self} - else: - kwargs = {} - warnings.warn( - "Sending a large body directly with raw bytes might" - " lock the event loop. 
You should probably pass an " - "io.BytesIO object instead", - ResourceWarning, - **kwargs, - ) - - async def write(self, writer: AbstractStreamWriter) -> None: - await writer.write(self._value) - - -class StringPayload(BytesPayload): - def __init__( - self, - value: Text, - *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any, - ) -> None: - - if encoding is None: - if content_type is None: - real_encoding = "utf-8" - content_type = "text/plain; charset=utf-8" - else: - mimetype = parse_mimetype(content_type) - real_encoding = mimetype.parameters.get("charset", "utf-8") - else: - if content_type is None: - content_type = "text/plain; charset=%s" % encoding - real_encoding = encoding - - super().__init__( - value.encode(real_encoding), - encoding=real_encoding, - content_type=content_type, - *args, - **kwargs, - ) - - -class StringIOPayload(StringPayload): - def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: - super().__init__(value.read(), *args, **kwargs) - - -class IOBasePayload(Payload): - def __init__( - self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any - ) -> None: - if "filename" not in kwargs: - kwargs["filename"] = guess_filename(value) - - super().__init__(value, *args, **kwargs) - - if self._filename is not None and disposition is not None: - if hdrs.CONTENT_DISPOSITION not in self.headers: - self.set_content_disposition(disposition, filename=self._filename) - - async def write(self, writer: AbstractStreamWriter) -> None: - loop = asyncio.get_event_loop() - try: - chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16) - while chunk: - await writer.write(chunk) - chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16) - finally: - await loop.run_in_executor(None, self._value.close) - - -class TextIOPayload(IOBasePayload): - def __init__( - self, - value: TextIO, - *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any, - ) -> None: - - if encoding is None: - if content_type is None: - encoding = "utf-8" - content_type = "text/plain; charset=utf-8" - else: - mimetype = parse_mimetype(content_type) - encoding = mimetype.parameters.get("charset", "utf-8") - else: - if content_type is None: - content_type = "text/plain; charset=%s" % encoding - - super().__init__( - value, - content_type=content_type, - encoding=encoding, - *args, - **kwargs, - ) - - @property - def size(self) -> Optional[int]: - try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: - return None - - async def write(self, writer: AbstractStreamWriter) -> None: - loop = asyncio.get_event_loop() - try: - chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16) - while chunk: - await writer.write(chunk.encode(self._encoding)) - chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16) - finally: - await loop.run_in_executor(None, self._value.close) - - -class BytesIOPayload(IOBasePayload): - @property - def size(self) -> int: - position = self._value.tell() - end = self._value.seek(0, os.SEEK_END) - self._value.seek(position) - return end - position - - -class BufferedReaderPayload(IOBasePayload): - @property - def size(self) -> Optional[int]: - try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: - # data.fileno() is not supported, e.g. 
- # io.BufferedReader(io.BytesIO(b'data')) - return None - - -class JsonPayload(BytesPayload): - def __init__( - self, - value: Any, - encoding: str = "utf-8", - content_type: str = "application/json", - dumps: JSONEncoder = json.dumps, - *args: Any, - **kwargs: Any, - ) -> None: - - super().__init__( - dumps(value).encode(encoding), - content_type=content_type, - encoding=encoding, - *args, - **kwargs, - ) - - -if TYPE_CHECKING: # pragma: no cover - from typing import AsyncIterable, AsyncIterator - - _AsyncIterator = AsyncIterator[bytes] - _AsyncIterable = AsyncIterable[bytes] -else: - from collections.abc import AsyncIterable, AsyncIterator - - _AsyncIterator = AsyncIterator - _AsyncIterable = AsyncIterable - - -class AsyncIterablePayload(Payload): - - _iter = None # type: Optional[_AsyncIterator] - - def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: - if not isinstance(value, AsyncIterable): - raise TypeError( - "value argument must support " - "collections.abc.AsyncIterablebe interface, " - "got {!r}".format(type(value)) - ) - - if "content_type" not in kwargs: - kwargs["content_type"] = "application/octet-stream" - - super().__init__(value, *args, **kwargs) - - self._iter = value.__aiter__() - - async def write(self, writer: AbstractStreamWriter) -> None: - if self._iter: - try: - # iter is not None check prevents rare cases - # when the case iterable is used twice - while True: - chunk = await self._iter.__anext__() - await writer.write(chunk) - except StopAsyncIteration: - self._iter = None - - -class StreamReaderPayload(AsyncIterablePayload): - def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: - super().__init__(value.iter_any(), *args, **kwargs) - - -PAYLOAD_REGISTRY = PayloadRegistry() -PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) -PAYLOAD_REGISTRY.register(StringPayload, str) -PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) -PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) -PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) -PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) -PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) -PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) -# try_last for giving a chance to more specialized async interables like -# multidict.BodyPartReaderPayload override the default -PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/third_party/python/aiohttp/aiohttp/payload_streamer.py b/third_party/python/aiohttp/aiohttp/payload_streamer.py deleted file mode 100644 index 3b2de1516406..000000000000 --- a/third_party/python/aiohttp/aiohttp/payload_streamer.py +++ /dev/null @@ -1,74 +0,0 @@ -""" Payload implemenation for coroutines as data provider. 
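For context on the payload registry removed above: ``PAYLOAD_REGISTRY.get()`` (exposed as ``aiohttp.payload.get_payload``) dispatches on the type of the body object and wraps it in the matching ``Payload`` subclass, which in turn picks a default ``Content-Type``. A minimal sketch, assuming a vendored or installed aiohttp is importable; the sample values are illustrative only::

    import io

    from aiohttp import payload

    # bytes -> BytesPayload, str -> StringPayload, BytesIO -> BytesIOPayload
    p1 = payload.get_payload(b"raw bytes")
    p2 = payload.get_payload("some text")
    p3 = payload.get_payload(io.BytesIO(b"buffered"))

    print(type(p1).__name__, p1.content_type)  # BytesPayload application/octet-stream
    print(type(p2).__name__, p2.content_type)  # StringPayload text/plain; charset=utf-8
    print(type(p3).__name__, p3.size)          # BytesIOPayload 8
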
- -As a simple case, you can upload data from file:: - - @aiohttp.streamer - async def file_sender(writer, file_name=None): - with open(file_name, 'rb') as f: - chunk = f.read(2**16) - while chunk: - await writer.write(chunk) - - chunk = f.read(2**16) - -Then you can use `file_sender` like this: - - async with session.post('http://httpbin.org/post', - data=file_sender(file_name='huge_file')) as resp: - print(await resp.text()) - -..note:: Coroutine must accept `writer` as first argument - -""" - -import types -import warnings -from typing import Any, Awaitable, Callable, Dict, Tuple - -from .abc import AbstractStreamWriter -from .payload import Payload, payload_type - -__all__ = ("streamer",) - - -class _stream_wrapper: - def __init__( - self, - coro: Callable[..., Awaitable[None]], - args: Tuple[Any, ...], - kwargs: Dict[str, Any], - ) -> None: - self.coro = types.coroutine(coro) - self.args = args - self.kwargs = kwargs - - async def __call__(self, writer: AbstractStreamWriter) -> None: - await self.coro(writer, *self.args, **self.kwargs) # type: ignore - - -class streamer: - def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: - warnings.warn( - "@streamer is deprecated, use async generators instead", - DeprecationWarning, - stacklevel=2, - ) - self.coro = coro - - def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: - return _stream_wrapper(self.coro, args, kwargs) - - -@payload_type(_stream_wrapper) -class StreamWrapperPayload(Payload): - async def write(self, writer: AbstractStreamWriter) -> None: - await self._value(writer) - - -@payload_type(streamer) -class StreamPayload(StreamWrapperPayload): - def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: - super().__init__(value(), *args, **kwargs) - - async def write(self, writer: AbstractStreamWriter) -> None: - await self._value(writer) diff --git a/third_party/python/aiohttp/aiohttp/py.typed b/third_party/python/aiohttp/aiohttp/py.typed deleted file mode 100644 index f5642f79f21d..000000000000 --- a/third_party/python/aiohttp/aiohttp/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/third_party/python/aiohttp/aiohttp/pytest_plugin.py b/third_party/python/aiohttp/aiohttp/pytest_plugin.py deleted file mode 100644 index 5204293410b0..000000000000 --- a/third_party/python/aiohttp/aiohttp/pytest_plugin.py +++ /dev/null @@ -1,380 +0,0 @@ -import asyncio -import contextlib -import warnings -from collections.abc import Callable - -import pytest - -from aiohttp.helpers import PY_37, isasyncgenfunction -from aiohttp.web import Application - -from .test_utils import ( - BaseTestServer, - RawTestServer, - TestClient, - TestServer, - loop_context, - setup_test_loop, - teardown_test_loop, - unused_port as _unused_port, -) - -try: - import uvloop -except ImportError: # pragma: no cover - uvloop = None - -try: - import tokio -except ImportError: # pragma: no cover - tokio = None - - -def pytest_addoption(parser): # type: ignore - parser.addoption( - "--aiohttp-fast", - action="store_true", - default=False, - help="run tests faster by disabling extra checks", - ) - parser.addoption( - "--aiohttp-loop", - action="store", - default="pyloop", - help="run tests with specific loop: pyloop, uvloop, tokio or all", - ) - parser.addoption( - "--aiohttp-enable-loop-debug", - action="store_true", - default=False, - help="enable event loop debug mode", - ) - - -def pytest_fixture_setup(fixturedef): # type: ignore - """ - Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. 
- """ - func = fixturedef.func - - if isasyncgenfunction(func): - # async generator fixture - is_async_gen = True - elif asyncio.iscoroutinefunction(func): - # regular async fixture - is_async_gen = False - else: - # not an async fixture, nothing to do - return - - strip_request = False - if "request" not in fixturedef.argnames: - fixturedef.argnames += ("request",) - strip_request = True - - def wrapper(*args, **kwargs): # type: ignore - request = kwargs["request"] - if strip_request: - del kwargs["request"] - - # if neither the fixture nor the test use the 'loop' fixture, - # 'getfixturevalue' will fail because the test is not parameterized - # (this can be removed someday if 'loop' is no longer parameterized) - if "loop" not in request.fixturenames: - raise Exception( - "Asynchronous fixtures must depend on the 'loop' fixture or " - "be used in tests depending from it." - ) - - _loop = request.getfixturevalue("loop") - - if is_async_gen: - # for async generators, we need to advance the generator once, - # then advance it again in a finalizer - gen = func(*args, **kwargs) - - def finalizer(): # type: ignore - try: - return _loop.run_until_complete(gen.__anext__()) - except StopAsyncIteration: - pass - - request.addfinalizer(finalizer) - return _loop.run_until_complete(gen.__anext__()) - else: - return _loop.run_until_complete(func(*args, **kwargs)) - - fixturedef.func = wrapper - - -@pytest.fixture -def fast(request): # type: ignore - """--fast config option""" - return request.config.getoption("--aiohttp-fast") - - -@pytest.fixture -def loop_debug(request): # type: ignore - """--enable-loop-debug config option""" - return request.config.getoption("--aiohttp-enable-loop-debug") - - -@contextlib.contextmanager -def _runtime_warning_context(): # type: ignore - """ - Context manager which checks for RuntimeWarnings, specifically to - avoid "coroutine 'X' was never awaited" warnings being missed. - - If RuntimeWarnings occur in the context a RuntimeError is raised. - """ - with warnings.catch_warnings(record=True) as _warnings: - yield - rw = [ - "{w.filename}:{w.lineno}:{w.message}".format(w=w) - for w in _warnings - if w.category == RuntimeWarning - ] - if rw: - raise RuntimeError( - "{} Runtime Warning{},\n{}".format( - len(rw), "" if len(rw) == 1 else "s", "\n".join(rw) - ) - ) - - -@contextlib.contextmanager -def _passthrough_loop_context(loop, fast=False): # type: ignore - """ - setups and tears down a loop unless one is passed in via the loop - argument when it's passed straight through. - """ - if loop: - # loop already exists, pass it straight through - yield loop - else: - # this shadows loop_context's standard behavior - loop = setup_test_loop() - yield loop - teardown_test_loop(loop, fast=fast) - - -def pytest_pycollect_makeitem(collector, name, obj): # type: ignore - """ - Fix pytest collecting for coroutines. - """ - if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): - return list(collector._genfunctions(name, obj)) - - -def pytest_pyfunc_call(pyfuncitem): # type: ignore - """ - Run coroutines in an event loop instead of a normal function call. 
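The ``pytest_fixture_setup`` hook above is what lets coroutine and async-generator fixtures run on the plugin's ``loop`` fixture, and it raises if such a fixture does not depend on ``loop``. A minimal sketch of how a fixture written against this plugin looks, assuming ``aiohttp.pytest_plugin`` is enabled; the fixture and test names are placeholders::

    import pytest


    @pytest.fixture
    async def fake_service(loop):       # must depend on the 'loop' fixture
        service = {"started": True}     # set-up runs on the test event loop
        yield service                   # async-generator fixture: value handed to the test
        service["started"] = False      # tear-down runs after the test finishes


    async def test_service_runs(fake_service):
        assert fake_service["started"]
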
- """ - fast = pyfuncitem.config.getoption("--aiohttp-fast") - if asyncio.iscoroutinefunction(pyfuncitem.function): - existing_loop = pyfuncitem.funcargs.get( - "proactor_loop" - ) or pyfuncitem.funcargs.get("loop", None) - with _runtime_warning_context(): - with _passthrough_loop_context(existing_loop, fast=fast) as _loop: - testargs = { - arg: pyfuncitem.funcargs[arg] - for arg in pyfuncitem._fixtureinfo.argnames - } - _loop.run_until_complete(pyfuncitem.obj(**testargs)) - - return True - - -def pytest_generate_tests(metafunc): # type: ignore - if "loop_factory" not in metafunc.fixturenames: - return - - loops = metafunc.config.option.aiohttp_loop - avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} - - if uvloop is not None: # pragma: no cover - avail_factories["uvloop"] = uvloop.EventLoopPolicy - - if tokio is not None: # pragma: no cover - avail_factories["tokio"] = tokio.EventLoopPolicy - - if loops == "all": - loops = "pyloop,uvloop?,tokio?" - - factories = {} # type: ignore - for name in loops.split(","): - required = not name.endswith("?") - name = name.strip(" ?") - if name not in avail_factories: # pragma: no cover - if required: - raise ValueError( - "Unknown loop '%s', available loops: %s" - % (name, list(factories.keys())) - ) - else: - continue - factories[name] = avail_factories[name] - metafunc.parametrize( - "loop_factory", list(factories.values()), ids=list(factories.keys()) - ) - - -@pytest.fixture -def loop(loop_factory, fast, loop_debug): # type: ignore - """Return an instance of the event loop.""" - policy = loop_factory() - asyncio.set_event_loop_policy(policy) - with loop_context(fast=fast) as _loop: - if loop_debug: - _loop.set_debug(True) # pragma: no cover - asyncio.set_event_loop(_loop) - yield _loop - - -@pytest.fixture -def proactor_loop(): # type: ignore - if not PY_37: - policy = asyncio.get_event_loop_policy() - policy._loop_factory = asyncio.ProactorEventLoop # type: ignore - else: - policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore - asyncio.set_event_loop_policy(policy) - - with loop_context(policy.new_event_loop) as _loop: - asyncio.set_event_loop(_loop) - yield _loop - - -@pytest.fixture -def unused_port(aiohttp_unused_port): # type: ignore # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_unused_port fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_unused_port - - -@pytest.fixture -def aiohttp_unused_port(): # type: ignore - """Return a port that is unused on the current host.""" - return _unused_port - - -@pytest.fixture -def aiohttp_server(loop): # type: ignore - """Factory to create a TestServer instance, given an app. - - aiohttp_server(app, **kwargs) - """ - servers = [] - - async def go(app, *, port=None, **kwargs): # type: ignore - server = TestServer(app, port=port) - await server.start_server(loop=loop, **kwargs) - servers.append(server) - return server - - yield go - - async def finalize(): # type: ignore - while servers: - await servers.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def test_server(aiohttp_server): # type: ignore # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_server fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_server - - -@pytest.fixture -def aiohttp_raw_server(loop): # type: ignore - """Factory to create a RawTestServer instance, given a web handler. 
- - aiohttp_raw_server(handler, **kwargs) - """ - servers = [] - - async def go(handler, *, port=None, **kwargs): # type: ignore - server = RawTestServer(handler, port=port) - await server.start_server(loop=loop, **kwargs) - servers.append(server) - return server - - yield go - - async def finalize(): # type: ignore - while servers: - await servers.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def raw_test_server(aiohttp_raw_server): # type: ignore # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_raw_server fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_raw_server - - -@pytest.fixture -def aiohttp_client(loop): # type: ignore - """Factory to create a TestClient instance. - - aiohttp_client(app, **kwargs) - aiohttp_client(server, **kwargs) - aiohttp_client(raw_server, **kwargs) - """ - clients = [] - - async def go(__param, *args, server_kwargs=None, **kwargs): # type: ignore - - if isinstance(__param, Callable) and not isinstance( # type: ignore - __param, (Application, BaseTestServer) - ): - __param = __param(loop, *args, **kwargs) - kwargs = {} - else: - assert not args, "args should be empty" - - if isinstance(__param, Application): - server_kwargs = server_kwargs or {} - server = TestServer(__param, loop=loop, **server_kwargs) - client = TestClient(server, loop=loop, **kwargs) - elif isinstance(__param, BaseTestServer): - client = TestClient(__param, loop=loop, **kwargs) - else: - raise ValueError("Unknown argument type: %r" % type(__param)) - - await client.start_server() - clients.append(client) - return client - - yield go - - async def finalize(): # type: ignore - while clients: - await clients.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def test_client(aiohttp_client): # type: ignore # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_client fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_client diff --git a/third_party/python/aiohttp/aiohttp/resolver.py b/third_party/python/aiohttp/aiohttp/resolver.py deleted file mode 100644 index 2974bcad7afc..000000000000 --- a/third_party/python/aiohttp/aiohttp/resolver.py +++ /dev/null @@ -1,149 +0,0 @@ -import asyncio -import socket -from typing import Any, Dict, List, Optional - -from .abc import AbstractResolver -from .helpers import get_running_loop - -__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") - -try: - import aiodns - - # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') -except ImportError: # pragma: no cover - aiodns = None - -aiodns_default = False - - -class ThreadedResolver(AbstractResolver): - """Use Executor for synchronous getaddrinfo() calls, which defaults to - concurrent.futures.ThreadPoolExecutor. - """ - - def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) - - async def resolve( - self, hostname: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - infos = await self._loop.getaddrinfo( - hostname, - port, - type=socket.SOCK_STREAM, - family=family, - flags=socket.AI_ADDRCONFIG, - ) - - hosts = [] - for family, _, proto, _, address in infos: - if family == socket.AF_INET6 and address[3]: # type: ignore - # This is essential for link-local IPv6 addresses. - # LL IPv6 is a VERY rare case. Strictly speaking, we should use - # getnameinfo() unconditionally, but performance makes sense. 
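The ``aiohttp_server`` and ``aiohttp_client`` fixtures shown in the removed plugin are the usual entry point for functional tests: a test builds an ``Application``, hands it to the fixture, and gets back a started ``TestClient``. A small sketch under the same plugin assumption; the handler and route are illustrative::

    from aiohttp import web


    async def hello(request):
        return web.Response(text="Hello, world")


    async def test_hello(aiohttp_client):
        app = web.Application()
        app.router.add_get("/", hello)
        client = await aiohttp_client(app)

        resp = await client.get("/")
        assert resp.status == 200
        assert "Hello" in await resp.text()
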
- host, _port = socket.getnameinfo( - address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV - ) - port = int(_port) - else: - host, port = address[:2] - hosts.append( - { - "hostname": hostname, - "host": host, - "port": port, - "family": family, - "proto": proto, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } - ) - - return hosts - - async def close(self) -> None: - pass - - -class AsyncResolver(AbstractResolver): - """Use the `aiodns` package to make asynchronous DNS lookups""" - - def __init__( - self, - loop: Optional[asyncio.AbstractEventLoop] = None, - *args: Any, - **kwargs: Any - ) -> None: - if aiodns is None: - raise RuntimeError("Resolver requires aiodns library") - - self._loop = get_running_loop(loop) - self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) - - if not hasattr(self._resolver, "gethostbyname"): - # aiodns 1.1 is not available, fallback to DNSResolver.query - self.resolve = self._resolve_with_query # type: ignore - - async def resolve( - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - try: - resp = await self._resolver.gethostbyname(host, family) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - hosts = [] - for address in resp.addresses: - hosts.append( - { - "hostname": host, - "host": address, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } - ) - - if not hosts: - raise OSError("DNS lookup failed") - - return hosts - - async def _resolve_with_query( - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - if family == socket.AF_INET6: - qtype = "AAAA" - else: - qtype = "A" - - try: - resp = await self._resolver.query(host, qtype) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - - hosts = [] - for rr in resp: - hosts.append( - { - "hostname": host, - "host": rr.host, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST, - } - ) - - if not hosts: - raise OSError("DNS lookup failed") - - return hosts - - async def close(self) -> None: - return self._resolver.cancel() - - -DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/third_party/python/aiohttp/aiohttp/signals.py b/third_party/python/aiohttp/aiohttp/signals.py deleted file mode 100644 index d406c02423b3..000000000000 --- a/third_party/python/aiohttp/aiohttp/signals.py +++ /dev/null @@ -1,34 +0,0 @@ -from aiohttp.frozenlist import FrozenList - -__all__ = ("Signal",) - - -class Signal(FrozenList): - """Coroutine-based signal implementation. - - To connect a callback to a signal, use any list method. - - Signals are fired using the send() coroutine, which takes named - arguments. - """ - - __slots__ = ("_owner",) - - def __init__(self, owner): - super().__init__() - self._owner = owner - - def __repr__(self): - return "".format( - self._owner, self.frozen, list(self) - ) - - async def send(self, *args, **kwargs): - """ - Sends data to all registered receivers. 
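Both resolvers removed above implement ``AbstractResolver.resolve()`` and return a list of host dictionaries; ``ThreadedResolver`` goes through ``loop.getaddrinfo()`` while ``AsyncResolver`` uses ``aiodns`` when it is installed. A minimal sketch that performs a real DNS lookup; the hostname is a placeholder::

    import asyncio

    from aiohttp.resolver import ThreadedResolver


    async def main():
        resolver = ThreadedResolver()
        hosts = await resolver.resolve("example.org", port=443)
        for entry in hosts:
            print(entry["host"], entry["port"], entry["family"])
        await resolver.close()


    asyncio.run(main())
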
- """ - if not self.frozen: - raise RuntimeError("Cannot send non-frozen signal.") - - for receiver in self: - await receiver(*args, **kwargs) # type: ignore diff --git a/third_party/python/aiohttp/aiohttp/signals.pyi b/third_party/python/aiohttp/aiohttp/signals.pyi deleted file mode 100644 index 455f8e2f2278..000000000000 --- a/third_party/python/aiohttp/aiohttp/signals.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any, Generic, TypeVar - -from aiohttp.frozenlist import FrozenList - -__all__ = ("Signal",) - -_T = TypeVar("_T") - -class Signal(FrozenList[_T], Generic[_T]): - def __init__(self, owner: Any) -> None: ... - def __repr__(self) -> str: ... - async def send(self, *args: Any, **kwargs: Any) -> None: ... diff --git a/third_party/python/aiohttp/aiohttp/streams.py b/third_party/python/aiohttp/aiohttp/streams.py deleted file mode 100644 index 42970b531d03..000000000000 --- a/third_party/python/aiohttp/aiohttp/streams.py +++ /dev/null @@ -1,647 +0,0 @@ -import asyncio -import collections -import warnings -from typing import Awaitable, Callable, Generic, List, Optional, Tuple, TypeVar - -from .base_protocol import BaseProtocol -from .helpers import BaseTimerContext, set_exception, set_result -from .log import internal_logger - -try: # pragma: no cover - from typing import Deque -except ImportError: - from typing_extensions import Deque - -__all__ = ( - "EMPTY_PAYLOAD", - "EofStream", - "StreamReader", - "DataQueue", - "FlowControlDataQueue", -) - -_T = TypeVar("_T") - - -class EofStream(Exception): - """eof stream indication.""" - - -class AsyncStreamIterator(Generic[_T]): - def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: - self.read_func = read_func - - def __aiter__(self) -> "AsyncStreamIterator[_T]": - return self - - async def __anext__(self) -> _T: - try: - rv = await self.read_func() - except EofStream: - raise StopAsyncIteration - if rv == b"": - raise StopAsyncIteration - return rv - - -class ChunkTupleAsyncStreamIterator: - def __init__(self, stream: "StreamReader") -> None: - self._stream = stream - - def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": - return self - - async def __anext__(self) -> Tuple[bytes, bool]: - rv = await self._stream.readchunk() - if rv == (b"", False): - raise StopAsyncIteration - return rv - - -class AsyncStreamReaderMixin: - def __aiter__(self) -> AsyncStreamIterator[bytes]: - return AsyncStreamIterator(self.readline) # type: ignore - - def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: - """Returns an asynchronous iterator that yields chunks of size n. - - Python-3.5 available for Python 3.5+ only - """ - return AsyncStreamIterator(lambda: self.read(n)) # type: ignore - - def iter_any(self) -> AsyncStreamIterator[bytes]: - """Returns an asynchronous iterator that yields all the available - data as soon as it is received - - Python-3.5 available for Python 3.5+ only - """ - return AsyncStreamIterator(self.readany) # type: ignore - - def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: - """Returns an asynchronous iterator that yields chunks of data - as they are received by the server. The yielded objects are tuples - of (bytes, bool) as returned by the StreamReader.readchunk method. - - Python-3.5 available for Python 3.5+ only - """ - return ChunkTupleAsyncStreamIterator(self) # type: ignore - - -class StreamReader(AsyncStreamReaderMixin): - """An enhancement of asyncio.StreamReader. - - Supports asynchronous iteration by line, chunk or as available:: - - async for line in reader: - ... 
- async for chunk in reader.iter_chunked(1024): - ... - async for slice in reader.iter_any(): - ... - - """ - - total_bytes = 0 - - def __init__( - self, - protocol: BaseProtocol, - limit: int, - *, - timer: Optional[BaseTimerContext] = None, - loop: Optional[asyncio.AbstractEventLoop] = None - ) -> None: - self._protocol = protocol - self._low_water = limit - self._high_water = limit * 2 - if loop is None: - loop = asyncio.get_event_loop() - self._loop = loop - self._size = 0 - self._cursor = 0 - self._http_chunk_splits = None # type: Optional[List[int]] - self._buffer = collections.deque() # type: Deque[bytes] - self._buffer_offset = 0 - self._eof = False - self._waiter = None # type: Optional[asyncio.Future[None]] - self._eof_waiter = None # type: Optional[asyncio.Future[None]] - self._exception = None # type: Optional[BaseException] - self._timer = timer - self._eof_callbacks = [] # type: List[Callable[[], None]] - - def __repr__(self) -> str: - info = [self.__class__.__name__] - if self._size: - info.append("%d bytes" % self._size) - if self._eof: - info.append("eof") - if self._low_water != 2 ** 16: # default limit - info.append("low=%d high=%d" % (self._low_water, self._high_water)) - if self._waiter: - info.append("w=%r" % self._waiter) - if self._exception: - info.append("e=%r" % self._exception) - return "<%s>" % " ".join(info) - - def get_read_buffer_limits(self) -> Tuple[int, int]: - return (self._low_water, self._high_water) - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception(self, exc: BaseException) -> None: - self._exception = exc - self._eof_callbacks.clear() - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_exception(waiter, exc) - - waiter = self._eof_waiter - if waiter is not None: - self._eof_waiter = None - set_exception(waiter, exc) - - def on_eof(self, callback: Callable[[], None]) -> None: - if self._eof: - try: - callback() - except Exception: - internal_logger.exception("Exception in eof callback") - else: - self._eof_callbacks.append(callback) - - def feed_eof(self) -> None: - self._eof = True - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - waiter = self._eof_waiter - if waiter is not None: - self._eof_waiter = None - set_result(waiter, None) - - for cb in self._eof_callbacks: - try: - cb() - except Exception: - internal_logger.exception("Exception in eof callback") - - self._eof_callbacks.clear() - - def is_eof(self) -> bool: - """Return True if 'feed_eof' was called.""" - return self._eof - - def at_eof(self) -> bool: - """Return True if the buffer is empty and 'feed_eof' was called.""" - return self._eof and not self._buffer - - async def wait_eof(self) -> None: - if self._eof: - return - - assert self._eof_waiter is None - self._eof_waiter = self._loop.create_future() - try: - await self._eof_waiter - finally: - self._eof_waiter = None - - def unread_data(self, data: bytes) -> None: - """rollback reading some data from stream, inserting it to buffer head.""" - warnings.warn( - "unread_data() is deprecated " - "and will be removed in future releases (#3260)", - DeprecationWarning, - stacklevel=2, - ) - if not data: - return - - if self._buffer_offset: - self._buffer[0] = self._buffer[0][self._buffer_offset :] - self._buffer_offset = 0 - self._size += len(data) - self._cursor -= len(data) - self._buffer.appendleft(data) - self._eof_counter = 0 - - # TODO: size is ignored, remove the param later - def feed_data(self, data: bytes, 
size: int = 0) -> None: - assert not self._eof, "feed_data after feed_eof" - - if not data: - return - - self._size += len(data) - self._buffer.append(data) - self.total_bytes += len(data) - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - if self._size > self._high_water and not self._protocol._reading_paused: - self._protocol.pause_reading() - - def begin_http_chunk_receiving(self) -> None: - if self._http_chunk_splits is None: - if self.total_bytes: - raise RuntimeError( - "Called begin_http_chunk_receiving when" "some data was already fed" - ) - self._http_chunk_splits = [] - - def end_http_chunk_receiving(self) -> None: - if self._http_chunk_splits is None: - raise RuntimeError( - "Called end_chunk_receiving without calling " - "begin_chunk_receiving first" - ) - - # self._http_chunk_splits contains logical byte offsets from start of - # the body transfer. Each offset is the offset of the end of a chunk. - # "Logical" means bytes, accessible for a user. - # If no chunks containig logical data were received, current position - # is difinitely zero. - pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0 - - if self.total_bytes == pos: - # We should not add empty chunks here. So we check for that. - # Note, when chunked + gzip is used, we can receive a chunk - # of compressed data, but that data may not be enough for gzip FSM - # to yield any uncompressed data. That's why current position may - # not change after receiving a chunk. - return - - self._http_chunk_splits.append(self.total_bytes) - - # wake up readchunk when end of http chunk received - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - async def _wait(self, func_name: str) -> None: - # StreamReader uses a future to link the protocol feed_data() method - # to a read coroutine. Running two read coroutines at the same time - # would have an unexpected behaviour. It would not possible to know - # which coroutine would get the next data. - if self._waiter is not None: - raise RuntimeError( - "%s() called while another coroutine is " - "already waiting for incoming data" % func_name - ) - - waiter = self._waiter = self._loop.create_future() - try: - if self._timer: - with self._timer: - await waiter - else: - await waiter - finally: - self._waiter = None - - async def readline(self) -> bytes: - if self._exception is not None: - raise self._exception - - line = [] - line_size = 0 - not_enough = True - - while not_enough: - while self._buffer and not_enough: - offset = self._buffer_offset - ichar = self._buffer[0].find(b"\n", offset) + 1 - # Read from current offset to found b'\n' or to the end. - data = self._read_nowait_chunk(ichar - offset if ichar else -1) - line.append(data) - line_size += len(data) - if ichar: - not_enough = False - - if line_size > self._high_water: - raise ValueError("Line is too long") - - if self._eof: - break - - if not_enough: - await self._wait("readline") - - return b"".join(line) - - async def read(self, n: int = -1) -> bytes: - if self._exception is not None: - raise self._exception - - # migration problem; with DataQueue you have to catch - # EofStream exception, so common way is to run payload.read() inside - # infinite loop. what can cause real infinite loop with StreamReader - # lets keep this code one major release. 
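To illustrate the read side of the ``StreamReader`` being removed: data fed with ``feed_data()`` is buffered until ``feed_eof()``, and the reader can then be iterated line by line via ``AsyncStreamReaderMixin``. A minimal sketch; the protocol is normally the connection's ``BaseProtocol``, and a ``Mock`` stands in here purely to satisfy the constructor::

    import asyncio
    from unittest import mock

    from aiohttp.streams import StreamReader


    async def main():
        protocol = mock.Mock()
        protocol._reading_paused = False
        reader = StreamReader(protocol, limit=2 ** 16, loop=asyncio.get_running_loop())

        reader.feed_data(b"first line\n")
        reader.feed_data(b"second line\n")
        reader.feed_eof()

        async for line in reader:   # __aiter__ delegates to readline()
            print(line)


    asyncio.run(main())
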
- if __debug__: - if self._eof and not self._buffer: - self._eof_counter = getattr(self, "_eof_counter", 0) + 1 - if self._eof_counter > 5: - internal_logger.warning( - "Multiple access to StreamReader in eof state, " - "might be infinite loop.", - stack_info=True, - ) - - if not n: - return b"" - - if n < 0: - # This used to just loop creating a new waiter hoping to - # collect everything in self._buffer, but that would - # deadlock if the subprocess sends more than self.limit - # bytes. So just call self.readany() until EOF. - blocks = [] - while True: - block = await self.readany() - if not block: - break - blocks.append(block) - return b"".join(blocks) - - # TODO: should be `if` instead of `while` - # because waiter maybe triggered on chunk end, - # without feeding any data - while not self._buffer and not self._eof: - await self._wait("read") - - return self._read_nowait(n) - - async def readany(self) -> bytes: - if self._exception is not None: - raise self._exception - - # TODO: should be `if` instead of `while` - # because waiter maybe triggered on chunk end, - # without feeding any data - while not self._buffer and not self._eof: - await self._wait("readany") - - return self._read_nowait(-1) - - async def readchunk(self) -> Tuple[bytes, bool]: - """Returns a tuple of (data, end_of_http_chunk). When chunked transfer - encoding is used, end_of_http_chunk is a boolean indicating if the end - of the data corresponds to the end of a HTTP chunk , otherwise it is - always False. - """ - while True: - if self._exception is not None: - raise self._exception - - while self._http_chunk_splits: - pos = self._http_chunk_splits.pop(0) - if pos == self._cursor: - return (b"", True) - if pos > self._cursor: - return (self._read_nowait(pos - self._cursor), True) - internal_logger.warning( - "Skipping HTTP chunk end due to data " - "consumption beyond chunk boundary" - ) - - if self._buffer: - return (self._read_nowait_chunk(-1), False) - # return (self._read_nowait(-1), False) - - if self._eof: - # Special case for signifying EOF. - # (b'', True) is not a final return value actually. - return (b"", False) - - await self._wait("readchunk") - - async def readexactly(self, n: int) -> bytes: - if self._exception is not None: - raise self._exception - - blocks = [] # type: List[bytes] - while n > 0: - block = await self.read(n) - if not block: - partial = b"".join(blocks) - raise asyncio.IncompleteReadError(partial, len(partial) + n) - blocks.append(block) - n -= len(block) - - return b"".join(blocks) - - def read_nowait(self, n: int = -1) -> bytes: - # default was changed to be consistent with .read(-1) - # - # I believe the most users don't know about the method and - # they are not affected. - if self._exception is not None: - raise self._exception - - if self._waiter and not self._waiter.done(): - raise RuntimeError( - "Called while some coroutine is waiting for incoming data." 
- ) - - return self._read_nowait(n) - - def _read_nowait_chunk(self, n: int) -> bytes: - first_buffer = self._buffer[0] - offset = self._buffer_offset - if n != -1 and len(first_buffer) - offset > n: - data = first_buffer[offset : offset + n] - self._buffer_offset += n - - elif offset: - self._buffer.popleft() - data = first_buffer[offset:] - self._buffer_offset = 0 - - else: - data = self._buffer.popleft() - - self._size -= len(data) - self._cursor += len(data) - - chunk_splits = self._http_chunk_splits - # Prevent memory leak: drop useless chunk splits - while chunk_splits and chunk_splits[0] < self._cursor: - chunk_splits.pop(0) - - if self._size < self._low_water and self._protocol._reading_paused: - self._protocol.resume_reading() - return data - - def _read_nowait(self, n: int) -> bytes: - """ Read not more than n bytes, or whole buffer if n == -1 """ - chunks = [] - - while self._buffer: - chunk = self._read_nowait_chunk(n) - chunks.append(chunk) - if n != -1: - n -= len(chunk) - if n == 0: - break - - return b"".join(chunks) if chunks else b"" - - -class EmptyStreamReader(AsyncStreamReaderMixin): - def exception(self) -> Optional[BaseException]: - return None - - def set_exception(self, exc: BaseException) -> None: - pass - - def on_eof(self, callback: Callable[[], None]) -> None: - try: - callback() - except Exception: - internal_logger.exception("Exception in eof callback") - - def feed_eof(self) -> None: - pass - - def is_eof(self) -> bool: - return True - - def at_eof(self) -> bool: - return True - - async def wait_eof(self) -> None: - return - - def feed_data(self, data: bytes, n: int = 0) -> None: - pass - - async def readline(self) -> bytes: - return b"" - - async def read(self, n: int = -1) -> bytes: - return b"" - - async def readany(self) -> bytes: - return b"" - - async def readchunk(self) -> Tuple[bytes, bool]: - return (b"", True) - - async def readexactly(self, n: int) -> bytes: - raise asyncio.IncompleteReadError(b"", n) - - def read_nowait(self) -> bytes: - return b"" - - -EMPTY_PAYLOAD = EmptyStreamReader() - - -class DataQueue(Generic[_T]): - """DataQueue is a general-purpose blocking queue with one reader.""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._eof = False - self._waiter = None # type: Optional[asyncio.Future[None]] - self._exception = None # type: Optional[BaseException] - self._size = 0 - self._buffer = collections.deque() # type: Deque[Tuple[_T, int]] - - def __len__(self) -> int: - return len(self._buffer) - - def is_eof(self) -> bool: - return self._eof - - def at_eof(self) -> bool: - return self._eof and not self._buffer - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception(self, exc: BaseException) -> None: - self._eof = True - self._exception = exc - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_exception(waiter, exc) - - def feed_data(self, data: _T, size: int = 0) -> None: - self._size += size - self._buffer.append((data, size)) - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - def feed_eof(self) -> None: - self._eof = True - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - async def read(self) -> _T: - if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - 
raise - - if self._buffer: - data, size = self._buffer.popleft() - self._size -= size - return data - else: - if self._exception is not None: - raise self._exception - else: - raise EofStream - - def __aiter__(self) -> AsyncStreamIterator[_T]: - return AsyncStreamIterator(self.read) - - -class FlowControlDataQueue(DataQueue[_T]): - """FlowControlDataQueue resumes and pauses an underlying stream. - - It is a destination for parsed data.""" - - def __init__( - self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop - ) -> None: - super().__init__(loop=loop) - - self._protocol = protocol - self._limit = limit * 2 - - def feed_data(self, data: _T, size: int = 0) -> None: - super().feed_data(data, size) - - if self._size > self._limit and not self._protocol._reading_paused: - self._protocol.pause_reading() - - async def read(self) -> _T: - try: - return await super().read() - finally: - if self._size < self._limit and self._protocol._reading_paused: - self._protocol.resume_reading() diff --git a/third_party/python/aiohttp/aiohttp/tcp_helpers.py b/third_party/python/aiohttp/aiohttp/tcp_helpers.py deleted file mode 100644 index 0e1dbf16552f..000000000000 --- a/third_party/python/aiohttp/aiohttp/tcp_helpers.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Helper methods to tune a TCP connection""" - -import asyncio -import socket -from contextlib import suppress -from typing import Optional # noqa - -__all__ = ("tcp_keepalive", "tcp_nodelay") - - -if hasattr(socket, "SO_KEEPALIVE"): - - def tcp_keepalive(transport: asyncio.Transport) -> None: - sock = transport.get_extra_info("socket") - if sock is not None: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - - -else: - - def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover - pass - - -def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: - sock = transport.get_extra_info("socket") - - if sock is None: - return - - if sock.family not in (socket.AF_INET, socket.AF_INET6): - return - - value = bool(value) - - # socket may be closed already, on windows OSError get raised - with suppress(OSError): - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/third_party/python/aiohttp/aiohttp/test_utils.py b/third_party/python/aiohttp/aiohttp/test_utils.py deleted file mode 100644 index 7a9ca7ddf3e7..000000000000 --- a/third_party/python/aiohttp/aiohttp/test_utils.py +++ /dev/null @@ -1,676 +0,0 @@ -"""Utilities shared by tests.""" - -import asyncio -import contextlib -import functools -import gc -import inspect -import os -import socket -import sys -import unittest -from abc import ABC, abstractmethod -from types import TracebackType -from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, Union -from unittest import mock - -from multidict import CIMultiDict, CIMultiDictProxy -from yarl import URL - -import aiohttp -from aiohttp.client import ( - ClientResponse, - _RequestContextManager, - _WSRequestContextManager, -) - -from . 
import ClientSession, hdrs -from .abc import AbstractCookieJar -from .client_reqrep import ClientResponse -from .client_ws import ClientWebSocketResponse -from .helpers import sentinel -from .http import HttpVersion, RawRequestMessage -from .signals import Signal -from .web import ( - Application, - AppRunner, - BaseRunner, - Request, - Server, - ServerRunner, - SockSite, - UrlMappingMatchInfo, -) -from .web_protocol import _RequestHandler - -if TYPE_CHECKING: # pragma: no cover - from ssl import SSLContext -else: - SSLContext = None - - -REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" - - -def get_unused_port_socket(host: str) -> socket.socket: - return get_port_socket(host, 0) - - -def get_port_socket(host: str, port: int) -> socket.socket: - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - if REUSE_ADDRESS: - # Windows has different semantics for SO_REUSEADDR, - # so don't set it. Ref: - # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind((host, port)) - return s - - -def unused_port() -> int: - """Return a port that is unused on the current host.""" - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("127.0.0.1", 0)) - return s.getsockname()[1] - - -class BaseTestServer(ABC): - __test__ = False - - def __init__( - self, - *, - scheme: Union[str, object] = sentinel, - loop: Optional[asyncio.AbstractEventLoop] = None, - host: str = "127.0.0.1", - port: Optional[int] = None, - skip_url_asserts: bool = False, - **kwargs: Any, - ) -> None: - self._loop = loop - self.runner = None # type: Optional[BaseRunner] - self._root = None # type: Optional[URL] - self.host = host - self.port = port - self._closed = False - self.scheme = scheme - self.skip_url_asserts = skip_url_asserts - - async def start_server( - self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any - ) -> None: - if self.runner: - return - self._loop = loop - self._ssl = kwargs.pop("ssl", None) - self.runner = await self._make_runner(**kwargs) - await self.runner.setup() - if not self.port: - self.port = 0 - _sock = get_port_socket(self.host, self.port) - self.host, self.port = _sock.getsockname()[:2] - site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) - await site.start() - server = site._server - assert server is not None - sockets = server.sockets - assert sockets is not None - self.port = sockets[0].getsockname()[1] - if self.scheme is sentinel: - if self._ssl: - scheme = "https" - else: - scheme = "http" - self.scheme = scheme - self._root = URL(f"{self.scheme}://{self.host}:{self.port}") - - @abstractmethod # pragma: no cover - async def _make_runner(self, **kwargs: Any) -> BaseRunner: - pass - - def make_url(self, path: str) -> URL: - assert self._root is not None - url = URL(path) - if not self.skip_url_asserts: - assert not url.is_absolute() - return self._root.join(url) - else: - return URL(str(self._root) + path) - - @property - def started(self) -> bool: - return self.runner is not None - - @property - def closed(self) -> bool: - return self._closed - - @property - def handler(self) -> Server: - # for backward compatibility - # web.Server instance - runner = self.runner - assert runner is not None - assert runner.server is not None - return runner.server - - async def close(self) -> None: - """Close all fixtures created by the test client. - - After that point, the TestClient is no longer usable. 
- - This is an idempotent function: running close multiple times - will not have any additional effects. - - close is also run when the object is garbage collected, and on - exit when used as a context manager. - - """ - if self.started and not self.closed: - assert self.runner is not None - await self.runner.cleanup() - self._root = None - self.port = None - self._closed = True - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "BaseTestServer": - await self.start_server(loop=self._loop) - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - await self.close() - - -class TestServer(BaseTestServer): - def __init__( - self, - app: Application, - *, - scheme: Union[str, object] = sentinel, - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ): - self.app = app - super().__init__(scheme=scheme, host=host, port=port, **kwargs) - - async def _make_runner(self, **kwargs: Any) -> BaseRunner: - return AppRunner(self.app, **kwargs) - - -class RawTestServer(BaseTestServer): - def __init__( - self, - handler: _RequestHandler, - *, - scheme: Union[str, object] = sentinel, - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ) -> None: - self._handler = handler - super().__init__(scheme=scheme, host=host, port=port, **kwargs) - - async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: - srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) - return ServerRunner(srv, debug=debug, **kwargs) - - -class TestClient: - """ - A test client implementation. - - To write functional tests for aiohttp based servers. - - """ - - __test__ = False - - def __init__( - self, - server: BaseTestServer, - *, - cookie_jar: Optional[AbstractCookieJar] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any, - ) -> None: - if not isinstance(server, BaseTestServer): - raise TypeError( - "server must be TestServer " "instance, found type: %r" % type(server) - ) - self._server = server - self._loop = loop - if cookie_jar is None: - cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) - self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) - self._closed = False - self._responses = [] # type: List[ClientResponse] - self._websockets = [] # type: List[ClientWebSocketResponse] - - async def start_server(self) -> None: - await self._server.start_server(loop=self._loop) - - @property - def host(self) -> str: - return self._server.host - - @property - def port(self) -> Optional[int]: - return self._server.port - - @property - def server(self) -> BaseTestServer: - return self._server - - @property - def app(self) -> Application: - return getattr(self._server, "app", None) - - @property - def session(self) -> ClientSession: - """An internal aiohttp.ClientSession. - - Unlike the methods on the TestClient, client session requests - do not automatically include the host in the url queried, and - will require an absolute path to the resource. 
- - """ - return self._session - - def make_url(self, path: str) -> URL: - return self._server.make_url(path) - - async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse: - resp = await self._session.request(method, self.make_url(path), **kwargs) - # save it to close later - self._responses.append(resp) - return resp - - def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager: - """Routes a request to tested http server. - - The interface is identical to aiohttp.ClientSession.request, - except the loop kwarg is overridden by the instance used by the - test server. - - """ - return _RequestContextManager(self._request(method, path, **kwargs)) - - def get(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP GET request.""" - return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) - - def post(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP POST request.""" - return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) - - def options(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP OPTIONS request.""" - return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) - - def head(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP HEAD request.""" - return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) - - def put(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PUT request.""" - return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) - - def patch(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) - - def delete(self, path: str, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) - - def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager: - """Initiate websocket connection. - - The api corresponds to aiohttp.ClientSession.ws_connect. - - """ - return _WSRequestContextManager(self._ws_connect(path, **kwargs)) - - async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse: - ws = await self._session.ws_connect(self.make_url(path), **kwargs) - self._websockets.append(ws) - return ws - - async def close(self) -> None: - """Close all fixtures created by the test client. - - After that point, the TestClient is no longer usable. - - This is an idempotent function: running close multiple times - will not have any additional effects. - - close is also run on exit when used as a(n) (asynchronous) - context manager. 
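Outside of pytest, the ``TestServer`` and ``TestClient`` classes shown above can be driven directly as asynchronous context managers. A minimal sketch; the handler and route are illustrative, and the ``loop`` arguments are left at their defaults::

    import asyncio

    from aiohttp import web
    from aiohttp.test_utils import TestClient, TestServer


    async def hello(request):
        return web.Response(text="Hello, world")


    async def main():
        app = web.Application()
        app.router.add_get("/", hello)

        async with TestClient(TestServer(app)) as client:
            resp = await client.get("/")
            assert resp.status == 200
            print(await resp.text())


    asyncio.run(main())
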
- - """ - if not self._closed: - for resp in self._responses: - resp.close() - for ws in self._websockets: - await ws.close() - await self._session.close() - await self._server.close() - self._closed = True - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "TestClient": - await self.start_server() - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self.close() - - -class AioHTTPTestCase(unittest.TestCase): - """A base class to allow for unittest web applications using - aiohttp. - - Provides the following: - - * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. - * self.loop (asyncio.BaseEventLoop): the event loop in which the - application and server are running. - * self.app (aiohttp.web.Application): the application returned by - self.get_application() - - Note that the TestClient's methods are asynchronous: you have to - execute function on the test client using asynchronous methods. - """ - - async def get_application(self) -> Application: - """ - This method should be overridden - to return the aiohttp.web.Application - object to test. - - """ - return self.get_app() - - def get_app(self) -> Application: - """Obsolete method used to constructing web application. - - Use .get_application() coroutine instead - - """ - raise RuntimeError("Did you forget to define get_application()?") - - def setUp(self) -> None: - self.loop = setup_test_loop() - - self.app = self.loop.run_until_complete(self.get_application()) - self.server = self.loop.run_until_complete(self.get_server(self.app)) - self.client = self.loop.run_until_complete(self.get_client(self.server)) - - self.loop.run_until_complete(self.client.start_server()) - - self.loop.run_until_complete(self.setUpAsync()) - - async def setUpAsync(self) -> None: - pass - - def tearDown(self) -> None: - self.loop.run_until_complete(self.tearDownAsync()) - self.loop.run_until_complete(self.client.close()) - teardown_test_loop(self.loop) - - async def tearDownAsync(self) -> None: - pass - - async def get_server(self, app: Application) -> TestServer: - """Return a TestServer instance.""" - return TestServer(app, loop=self.loop) - - async def get_client(self, server: TestServer) -> TestClient: - """Return a TestClient instance.""" - return TestClient(server, loop=self.loop) - - -def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: - """A decorator dedicated to use with asynchronous methods of an - AioHTTPTestCase. - - Handles executing an asynchronous function, using - the self.loop of the AioHTTPTestCase. - """ - - @functools.wraps(func, *args, **kwargs) - def new_func(self: Any, *inner_args: Any, **inner_kwargs: Any) -> Any: - return self.loop.run_until_complete(func(self, *inner_args, **inner_kwargs)) - - return new_func - - -_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] - - -@contextlib.contextmanager -def loop_context( - loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False -) -> Iterator[asyncio.AbstractEventLoop]: - """A contextmanager that creates an event_loop, for test purposes. - - Handles the creation and cleanup of a test loop. 
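``AioHTTPTestCase`` and ``unittest_run_loop`` above wrap the same machinery for unittest-style tests: ``get_application()`` supplies the app, and decorated test methods run on ``self.loop`` against ``self.client``. A short sketch; the handler and class names are placeholders::

    import unittest

    from aiohttp import web
    from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop


    class HelloAppTestCase(AioHTTPTestCase):
        async def get_application(self):
            async def hello(request):
                return web.Response(text="Hello, world")

            app = web.Application()
            app.router.add_get("/", hello)
            return app

        @unittest_run_loop
        async def test_hello(self):
            resp = await self.client.get("/")
            self.assertEqual(resp.status, 200)
            self.assertIn("Hello", await resp.text())


    if __name__ == "__main__":
        unittest.main()
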
- """ - loop = setup_test_loop(loop_factory) - yield loop - teardown_test_loop(loop, fast=fast) - - -def setup_test_loop( - loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, -) -> asyncio.AbstractEventLoop: - """Create and return an asyncio.BaseEventLoop - instance. - - The caller should also call teardown_test_loop, - once they are done with the loop. - """ - loop = loop_factory() - try: - module = loop.__class__.__module__ - skip_watcher = "uvloop" in module - except AttributeError: # pragma: no cover - # Just in case - skip_watcher = True - asyncio.set_event_loop(loop) - if sys.platform != "win32" and not skip_watcher: - policy = asyncio.get_event_loop_policy() - watcher = asyncio.SafeChildWatcher() - watcher.attach_loop(loop) - with contextlib.suppress(NotImplementedError): - policy.set_child_watcher(watcher) - return loop - - -def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: - """Teardown and cleanup an event_loop created - by setup_test_loop. - - """ - closed = loop.is_closed() - if not closed: - loop.call_soon(loop.stop) - loop.run_forever() - loop.close() - - if not fast: - gc.collect() - - asyncio.set_event_loop(None) - - -def _create_app_mock() -> mock.MagicMock: - def get_dict(app: Any, key: str) -> Any: - return app.__app_dict[key] - - def set_dict(app: Any, key: str, value: Any) -> None: - app.__app_dict[key] = value - - app = mock.MagicMock() - app.__app_dict = {} - app.__getitem__ = get_dict - app.__setitem__ = set_dict - - app._debug = False - app.on_response_prepare = Signal(app) - app.on_response_prepare.freeze() - return app - - -def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: - transport = mock.Mock() - - def get_extra_info(key: str) -> Optional[SSLContext]: - if key == "sslcontext": - return sslcontext - else: - return None - - transport.get_extra_info.side_effect = get_extra_info - return transport - - -def make_mocked_request( - method: str, - path: str, - headers: Any = None, - *, - match_info: Any = sentinel, - version: HttpVersion = HttpVersion(1, 1), - closing: bool = False, - app: Any = None, - writer: Any = sentinel, - protocol: Any = sentinel, - transport: Any = sentinel, - payload: Any = sentinel, - sslcontext: Optional[SSLContext] = None, - client_max_size: int = 1024 ** 2, - loop: Any = ..., -) -> Request: - """Creates mocked web.Request testing purposes. - - Useful in unit tests, when spinning full web server is overkill or - specific conditions and errors are hard to trigger. 
- - """ - - task = mock.Mock() - if loop is ...: - loop = mock.Mock() - loop.create_future.return_value = () - - if version < HttpVersion(1, 1): - closing = True - - if headers: - headers = CIMultiDictProxy(CIMultiDict(headers)) - raw_hdrs = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() - ) - else: - headers = CIMultiDictProxy(CIMultiDict()) - raw_hdrs = () - - chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() - - message = RawRequestMessage( - method, - path, - version, - headers, - raw_hdrs, - closing, - False, - False, - chunked, - URL(path), - ) - if app is None: - app = _create_app_mock() - - if transport is sentinel: - transport = _create_transport(sslcontext) - - if protocol is sentinel: - protocol = mock.Mock() - protocol.transport = transport - - if writer is sentinel: - writer = mock.Mock() - writer.write_headers = make_mocked_coro(None) - writer.write = make_mocked_coro(None) - writer.write_eof = make_mocked_coro(None) - writer.drain = make_mocked_coro(None) - writer.transport = transport - - protocol.transport = transport - protocol.writer = writer - - if payload is sentinel: - payload = mock.Mock() - - req = Request( - message, payload, protocol, writer, task, loop, client_max_size=client_max_size - ) - - match_info = UrlMappingMatchInfo( - {} if match_info is sentinel else match_info, mock.Mock() - ) - match_info.add_app(app) - req._match_info = match_info - - return req - - -def make_mocked_coro( - return_value: Any = sentinel, raise_exception: Any = sentinel -) -> Any: - """Creates a coroutine mock.""" - - async def mock_coro(*args: Any, **kwargs: Any) -> Any: - if raise_exception is not sentinel: - raise raise_exception - if not inspect.isawaitable(return_value): - return return_value - await return_value - - return mock.Mock(wraps=mock_coro) diff --git a/third_party/python/aiohttp/aiohttp/tracing.py b/third_party/python/aiohttp/aiohttp/tracing.py deleted file mode 100644 index 7ae7948f9ac1..000000000000 --- a/third_party/python/aiohttp/aiohttp/tracing.py +++ /dev/null @@ -1,442 +0,0 @@ -from types import SimpleNamespace -from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar - -import attr -from multidict import CIMultiDict -from yarl import URL - -from .client_reqrep import ClientResponse -from .signals import Signal - -if TYPE_CHECKING: # pragma: no cover - from typing_extensions import Protocol - - from .client import ClientSession - - _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) - - class _SignalCallback(Protocol[_ParamT_contra]): - def __call__( - self, - __client_session: ClientSession, - __trace_config_ctx: SimpleNamespace, - __params: _ParamT_contra, - ) -> Awaitable[None]: - ... 
- - -__all__ = ( - "TraceConfig", - "TraceRequestStartParams", - "TraceRequestEndParams", - "TraceRequestExceptionParams", - "TraceConnectionQueuedStartParams", - "TraceConnectionQueuedEndParams", - "TraceConnectionCreateStartParams", - "TraceConnectionCreateEndParams", - "TraceConnectionReuseconnParams", - "TraceDnsResolveHostStartParams", - "TraceDnsResolveHostEndParams", - "TraceDnsCacheHitParams", - "TraceDnsCacheMissParams", - "TraceRequestRedirectParams", - "TraceRequestChunkSentParams", - "TraceResponseChunkReceivedParams", -) - - -class TraceConfig: - """First-class used to trace requests launched via ClientSession - objects.""" - - def __init__( - self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace - ) -> None: - self._on_request_start = Signal( - self - ) # type: Signal[_SignalCallback[TraceRequestStartParams]] - self._on_request_chunk_sent = Signal( - self - ) # type: Signal[_SignalCallback[TraceRequestChunkSentParams]] - self._on_response_chunk_received = Signal( - self - ) # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]] - self._on_request_end = Signal( - self - ) # type: Signal[_SignalCallback[TraceRequestEndParams]] - self._on_request_exception = Signal( - self - ) # type: Signal[_SignalCallback[TraceRequestExceptionParams]] - self._on_request_redirect = Signal( - self - ) # type: Signal[_SignalCallback[TraceRequestRedirectParams]] - self._on_connection_queued_start = Signal( - self - ) # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]] - self._on_connection_queued_end = Signal( - self - ) # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]] - self._on_connection_create_start = Signal( - self - ) # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]] - self._on_connection_create_end = Signal( - self - ) # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]] - self._on_connection_reuseconn = Signal( - self - ) # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]] - self._on_dns_resolvehost_start = Signal( - self - ) # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]] - self._on_dns_resolvehost_end = Signal( - self - ) # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]] - self._on_dns_cache_hit = Signal( - self - ) # type: Signal[_SignalCallback[TraceDnsCacheHitParams]] - self._on_dns_cache_miss = Signal( - self - ) # type: Signal[_SignalCallback[TraceDnsCacheMissParams]] - - self._trace_config_ctx_factory = trace_config_ctx_factory - - def trace_config_ctx( - self, trace_request_ctx: Optional[SimpleNamespace] = None - ) -> SimpleNamespace: - """ Return a new trace_config_ctx instance """ - return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) - - def freeze(self) -> None: - self._on_request_start.freeze() - self._on_request_chunk_sent.freeze() - self._on_response_chunk_received.freeze() - self._on_request_end.freeze() - self._on_request_exception.freeze() - self._on_request_redirect.freeze() - self._on_connection_queued_start.freeze() - self._on_connection_queued_end.freeze() - self._on_connection_create_start.freeze() - self._on_connection_create_end.freeze() - self._on_connection_reuseconn.freeze() - self._on_dns_resolvehost_start.freeze() - self._on_dns_resolvehost_end.freeze() - self._on_dns_cache_hit.freeze() - self._on_dns_cache_miss.freeze() - - @property - def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": - return self._on_request_start - - @property - def on_request_chunk_sent( - self, - ) -> 
"Signal[_SignalCallback[TraceRequestChunkSentParams]]": - return self._on_request_chunk_sent - - @property - def on_response_chunk_received( - self, - ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": - return self._on_response_chunk_received - - @property - def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": - return self._on_request_end - - @property - def on_request_exception( - self, - ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": - return self._on_request_exception - - @property - def on_request_redirect( - self, - ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": - return self._on_request_redirect - - @property - def on_connection_queued_start( - self, - ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": - return self._on_connection_queued_start - - @property - def on_connection_queued_end( - self, - ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": - return self._on_connection_queued_end - - @property - def on_connection_create_start( - self, - ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": - return self._on_connection_create_start - - @property - def on_connection_create_end( - self, - ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": - return self._on_connection_create_end - - @property - def on_connection_reuseconn( - self, - ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": - return self._on_connection_reuseconn - - @property - def on_dns_resolvehost_start( - self, - ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": - return self._on_dns_resolvehost_start - - @property - def on_dns_resolvehost_end( - self, - ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": - return self._on_dns_resolvehost_end - - @property - def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": - return self._on_dns_cache_hit - - @property - def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": - return self._on_dns_cache_miss - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestStartParams: - """ Parameters sent by the `on_request_start` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestChunkSentParams: - """ Parameters sent by the `on_request_chunk_sent` signal""" - - method: str - url: URL - chunk: bytes - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceResponseChunkReceivedParams: - """ Parameters sent by the `on_response_chunk_received` signal""" - - method: str - url: URL - chunk: bytes - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestEndParams: - """ Parameters sent by the `on_request_end` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - response: ClientResponse - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestExceptionParams: - """ Parameters sent by the `on_request_exception` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - exception: BaseException - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestRedirectParams: - """ Parameters sent by the `on_request_redirect` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - response: ClientResponse - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionQueuedStartParams: - """ Parameters sent by the `on_connection_queued_start` signal""" - - 
-@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionQueuedEndParams: - """ Parameters sent by the `on_connection_queued_end` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionCreateStartParams: - """ Parameters sent by the `on_connection_create_start` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionCreateEndParams: - """ Parameters sent by the `on_connection_create_end` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionReuseconnParams: - """ Parameters sent by the `on_connection_reuseconn` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsResolveHostStartParams: - """ Parameters sent by the `on_dns_resolvehost_start` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsResolveHostEndParams: - """ Parameters sent by the `on_dns_resolvehost_end` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsCacheHitParams: - """ Parameters sent by the `on_dns_cache_hit` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsCacheMissParams: - """ Parameters sent by the `on_dns_cache_miss` signal""" - - host: str - - -class Trace: - """Internal class used to keep together the main dependencies used - at the moment of send a signal.""" - - def __init__( - self, - session: "ClientSession", - trace_config: TraceConfig, - trace_config_ctx: SimpleNamespace, - ) -> None: - self._trace_config = trace_config - self._trace_config_ctx = trace_config_ctx - self._session = session - - async def send_request_start( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - return await self._trace_config.on_request_start.send( - self._session, - self._trace_config_ctx, - TraceRequestStartParams(method, url, headers), - ) - - async def send_request_chunk_sent( - self, method: str, url: URL, chunk: bytes - ) -> None: - return await self._trace_config.on_request_chunk_sent.send( - self._session, - self._trace_config_ctx, - TraceRequestChunkSentParams(method, url, chunk), - ) - - async def send_response_chunk_received( - self, method: str, url: URL, chunk: bytes - ) -> None: - return await self._trace_config.on_response_chunk_received.send( - self._session, - self._trace_config_ctx, - TraceResponseChunkReceivedParams(method, url, chunk), - ) - - async def send_request_end( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - response: ClientResponse, - ) -> None: - return await self._trace_config.on_request_end.send( - self._session, - self._trace_config_ctx, - TraceRequestEndParams(method, url, headers, response), - ) - - async def send_request_exception( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - exception: BaseException, - ) -> None: - return await self._trace_config.on_request_exception.send( - self._session, - self._trace_config_ctx, - TraceRequestExceptionParams(method, url, headers, exception), - ) - - async def send_request_redirect( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - response: ClientResponse, - ) -> None: - return await self._trace_config._on_request_redirect.send( - self._session, - self._trace_config_ctx, - TraceRequestRedirectParams(method, url, headers, response), - ) - - async def send_connection_queued_start(self) -> None: - return await self._trace_config.on_connection_queued_start.send( - self._session, 
self._trace_config_ctx, TraceConnectionQueuedStartParams() - ) - - async def send_connection_queued_end(self) -> None: - return await self._trace_config.on_connection_queued_end.send( - self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() - ) - - async def send_connection_create_start(self) -> None: - return await self._trace_config.on_connection_create_start.send( - self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() - ) - - async def send_connection_create_end(self) -> None: - return await self._trace_config.on_connection_create_end.send( - self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() - ) - - async def send_connection_reuseconn(self) -> None: - return await self._trace_config.on_connection_reuseconn.send( - self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() - ) - - async def send_dns_resolvehost_start(self, host: str) -> None: - return await self._trace_config.on_dns_resolvehost_start.send( - self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) - ) - - async def send_dns_resolvehost_end(self, host: str) -> None: - return await self._trace_config.on_dns_resolvehost_end.send( - self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) - ) - - async def send_dns_cache_hit(self, host: str) -> None: - return await self._trace_config.on_dns_cache_hit.send( - self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host) - ) - - async def send_dns_cache_miss(self, host: str) -> None: - return await self._trace_config.on_dns_cache_miss.send( - self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) - ) diff --git a/third_party/python/aiohttp/aiohttp/typedefs.py b/third_party/python/aiohttp/aiohttp/typedefs.py deleted file mode 100644 index 1b68a242af50..000000000000 --- a/third_party/python/aiohttp/aiohttp/typedefs.py +++ /dev/null @@ -1,46 +0,0 @@ -import json -import os -import pathlib -import sys -from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Tuple, Union - -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr -from yarl import URL - -DEFAULT_JSON_ENCODER = json.dumps -DEFAULT_JSON_DECODER = json.loads - -if TYPE_CHECKING: # pragma: no cover - _CIMultiDict = CIMultiDict[str] - _CIMultiDictProxy = CIMultiDictProxy[str] - _MultiDict = MultiDict[str] - _MultiDictProxy = MultiDictProxy[str] - from http.cookies import BaseCookie, Morsel -else: - _CIMultiDict = CIMultiDict - _CIMultiDictProxy = CIMultiDictProxy - _MultiDict = MultiDict - _MultiDictProxy = MultiDictProxy - -Byteish = Union[bytes, bytearray, memoryview] -JSONEncoder = Callable[[Any], str] -JSONDecoder = Callable[[str], Any] -LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] -RawHeaders = Tuple[Tuple[bytes, bytes], ...] 
-StrOrURL = Union[str, URL] - -LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] -LooseCookiesIterables = Iterable[ - Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] -] -LooseCookies = Union[ - LooseCookiesMappings, - LooseCookiesIterables, - "BaseCookie[str]", -] - - -if sys.version_info >= (3, 6): - PathLike = Union[str, "os.PathLike[str]"] -else: - PathLike = Union[str, pathlib.PurePath] diff --git a/third_party/python/aiohttp/aiohttp/web.py b/third_party/python/aiohttp/aiohttp/web.py deleted file mode 100644 index 557e3c3b4d00..000000000000 --- a/third_party/python/aiohttp/aiohttp/web.py +++ /dev/null @@ -1,581 +0,0 @@ -import asyncio -import logging -import socket -import sys -from argparse import ArgumentParser -from collections.abc import Iterable -from importlib import import_module -from typing import ( - Any as Any, - Awaitable as Awaitable, - Callable as Callable, - Iterable as TypingIterable, - List as List, - Optional as Optional, - Set as Set, - Type as Type, - Union as Union, - cast as cast, -) - -from .abc import AbstractAccessLogger -from .helpers import all_tasks -from .log import access_logger -from .web_app import Application as Application, CleanupError as CleanupError -from .web_exceptions import ( - HTTPAccepted as HTTPAccepted, - HTTPBadGateway as HTTPBadGateway, - HTTPBadRequest as HTTPBadRequest, - HTTPClientError as HTTPClientError, - HTTPConflict as HTTPConflict, - HTTPCreated as HTTPCreated, - HTTPError as HTTPError, - HTTPException as HTTPException, - HTTPExpectationFailed as HTTPExpectationFailed, - HTTPFailedDependency as HTTPFailedDependency, - HTTPForbidden as HTTPForbidden, - HTTPFound as HTTPFound, - HTTPGatewayTimeout as HTTPGatewayTimeout, - HTTPGone as HTTPGone, - HTTPInsufficientStorage as HTTPInsufficientStorage, - HTTPInternalServerError as HTTPInternalServerError, - HTTPLengthRequired as HTTPLengthRequired, - HTTPMethodNotAllowed as HTTPMethodNotAllowed, - HTTPMisdirectedRequest as HTTPMisdirectedRequest, - HTTPMovedPermanently as HTTPMovedPermanently, - HTTPMultipleChoices as HTTPMultipleChoices, - HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, - HTTPNoContent as HTTPNoContent, - HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, - HTTPNotAcceptable as HTTPNotAcceptable, - HTTPNotExtended as HTTPNotExtended, - HTTPNotFound as HTTPNotFound, - HTTPNotImplemented as HTTPNotImplemented, - HTTPNotModified as HTTPNotModified, - HTTPOk as HTTPOk, - HTTPPartialContent as HTTPPartialContent, - HTTPPaymentRequired as HTTPPaymentRequired, - HTTPPermanentRedirect as HTTPPermanentRedirect, - HTTPPreconditionFailed as HTTPPreconditionFailed, - HTTPPreconditionRequired as HTTPPreconditionRequired, - HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, - HTTPRedirection as HTTPRedirection, - HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, - HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, - HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, - HTTPRequestTimeout as HTTPRequestTimeout, - HTTPRequestURITooLong as HTTPRequestURITooLong, - HTTPResetContent as HTTPResetContent, - HTTPSeeOther as HTTPSeeOther, - HTTPServerError as HTTPServerError, - HTTPServiceUnavailable as HTTPServiceUnavailable, - HTTPSuccessful as HTTPSuccessful, - HTTPTemporaryRedirect as HTTPTemporaryRedirect, - HTTPTooManyRequests as HTTPTooManyRequests, - HTTPUnauthorized as HTTPUnauthorized, - HTTPUnavailableForLegalReasons as 
HTTPUnavailableForLegalReasons, - HTTPUnprocessableEntity as HTTPUnprocessableEntity, - HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, - HTTPUpgradeRequired as HTTPUpgradeRequired, - HTTPUseProxy as HTTPUseProxy, - HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, - HTTPVersionNotSupported as HTTPVersionNotSupported, -) -from .web_fileresponse import FileResponse as FileResponse -from .web_log import AccessLogger -from .web_middlewares import ( - middleware as middleware, - normalize_path_middleware as normalize_path_middleware, -) -from .web_protocol import ( - PayloadAccessError as PayloadAccessError, - RequestHandler as RequestHandler, - RequestPayloadError as RequestPayloadError, -) -from .web_request import ( - BaseRequest as BaseRequest, - FileField as FileField, - Request as Request, -) -from .web_response import ( - ContentCoding as ContentCoding, - Response as Response, - StreamResponse as StreamResponse, - json_response as json_response, -) -from .web_routedef import ( - AbstractRouteDef as AbstractRouteDef, - RouteDef as RouteDef, - RouteTableDef as RouteTableDef, - StaticDef as StaticDef, - delete as delete, - get as get, - head as head, - options as options, - patch as patch, - post as post, - put as put, - route as route, - static as static, - view as view, -) -from .web_runner import ( - AppRunner as AppRunner, - BaseRunner as BaseRunner, - BaseSite as BaseSite, - GracefulExit as GracefulExit, - NamedPipeSite as NamedPipeSite, - ServerRunner as ServerRunner, - SockSite as SockSite, - TCPSite as TCPSite, - UnixSite as UnixSite, -) -from .web_server import Server as Server -from .web_urldispatcher import ( - AbstractResource as AbstractResource, - AbstractRoute as AbstractRoute, - DynamicResource as DynamicResource, - PlainResource as PlainResource, - Resource as Resource, - ResourceRoute as ResourceRoute, - StaticResource as StaticResource, - UrlDispatcher as UrlDispatcher, - UrlMappingMatchInfo as UrlMappingMatchInfo, - View as View, -) -from .web_ws import ( - WebSocketReady as WebSocketReady, - WebSocketResponse as WebSocketResponse, - WSMsgType as WSMsgType, -) - -__all__ = ( - # web_app - "Application", - "CleanupError", - # web_exceptions - "HTTPAccepted", - "HTTPBadGateway", - "HTTPBadRequest", - "HTTPClientError", - "HTTPConflict", - "HTTPCreated", - "HTTPError", - "HTTPException", - "HTTPExpectationFailed", - "HTTPFailedDependency", - "HTTPForbidden", - "HTTPFound", - "HTTPGatewayTimeout", - "HTTPGone", - "HTTPInsufficientStorage", - "HTTPInternalServerError", - "HTTPLengthRequired", - "HTTPMethodNotAllowed", - "HTTPMisdirectedRequest", - "HTTPMovedPermanently", - "HTTPMultipleChoices", - "HTTPNetworkAuthenticationRequired", - "HTTPNoContent", - "HTTPNonAuthoritativeInformation", - "HTTPNotAcceptable", - "HTTPNotExtended", - "HTTPNotFound", - "HTTPNotImplemented", - "HTTPNotModified", - "HTTPOk", - "HTTPPartialContent", - "HTTPPaymentRequired", - "HTTPPermanentRedirect", - "HTTPPreconditionFailed", - "HTTPPreconditionRequired", - "HTTPProxyAuthenticationRequired", - "HTTPRedirection", - "HTTPRequestEntityTooLarge", - "HTTPRequestHeaderFieldsTooLarge", - "HTTPRequestRangeNotSatisfiable", - "HTTPRequestTimeout", - "HTTPRequestURITooLong", - "HTTPResetContent", - "HTTPSeeOther", - "HTTPServerError", - "HTTPServiceUnavailable", - "HTTPSuccessful", - "HTTPTemporaryRedirect", - "HTTPTooManyRequests", - "HTTPUnauthorized", - "HTTPUnavailableForLegalReasons", - "HTTPUnprocessableEntity", - "HTTPUnsupportedMediaType", - "HTTPUpgradeRequired", - "HTTPUseProxy", - 
"HTTPVariantAlsoNegotiates", - "HTTPVersionNotSupported", - # web_fileresponse - "FileResponse", - # web_middlewares - "middleware", - "normalize_path_middleware", - # web_protocol - "PayloadAccessError", - "RequestHandler", - "RequestPayloadError", - # web_request - "BaseRequest", - "FileField", - "Request", - # web_response - "ContentCoding", - "Response", - "StreamResponse", - "json_response", - # web_routedef - "AbstractRouteDef", - "RouteDef", - "RouteTableDef", - "StaticDef", - "delete", - "get", - "head", - "options", - "patch", - "post", - "put", - "route", - "static", - "view", - # web_runner - "AppRunner", - "BaseRunner", - "BaseSite", - "GracefulExit", - "ServerRunner", - "SockSite", - "TCPSite", - "UnixSite", - "NamedPipeSite", - # web_server - "Server", - # web_urldispatcher - "AbstractResource", - "AbstractRoute", - "DynamicResource", - "PlainResource", - "Resource", - "ResourceRoute", - "StaticResource", - "UrlDispatcher", - "UrlMappingMatchInfo", - "View", - # web_ws - "WebSocketReady", - "WebSocketResponse", - "WSMsgType", - # web - "run_app", -) - - -try: - from ssl import SSLContext -except ImportError: # pragma: no cover - SSLContext = Any # type: ignore - -HostSequence = TypingIterable[str] - - -async def _run_app( - app: Union[Application, Awaitable[Application]], - *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Optional[str] = None, - sock: Optional[socket.socket] = None, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - print: Callable[..., None] = print, - backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, - handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, -) -> None: - # A internal functio to actually do all dirty job for application running - if asyncio.iscoroutine(app): - app = await app # type: ignore - - app = cast(Application, app) - - runner = AppRunner( - app, - handle_signals=handle_signals, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - ) - - await runner.setup() - - sites = [] # type: List[BaseSite] - - try: - if host is not None: - if isinstance(host, (str, bytes, bytearray, memoryview)): - sites.append( - TCPSite( - runner, - host, - port, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - else: - for h in host: - sites.append( - TCPSite( - runner, - h, - port, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - elif path is None and sock is None or port is not None: - sites.append( - TCPSite( - runner, - port=port, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - - if path is not None: - if isinstance(path, (str, bytes, bytearray, memoryview)): - sites.append( - UnixSite( - runner, - path, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - else: - for p in path: - sites.append( - UnixSite( - runner, - p, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - - if sock is not None: - if not isinstance(sock, Iterable): - sites.append( - SockSite( - runner, - sock, 
- shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - else: - for s in sock: - sites.append( - SockSite( - runner, - s, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - for site in sites: - await site.start() - - if print: # pragma: no branch - names = sorted(str(s.name) for s in runner.sites) - print( - "======== Running on {} ========\n" - "(Press CTRL+C to quit)".format(", ".join(names)) - ) - - # sleep forever by 1 hour intervals, - # on Windows before Python 3.8 wake up every 1 second to handle - # Ctrl+C smoothly - if sys.platform == "win32" and sys.version_info < (3, 8): - delay = 1 - else: - delay = 3600 - - while True: - await asyncio.sleep(delay) - finally: - await runner.cleanup() - - -def _cancel_tasks( - to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop -) -> None: - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - loop.run_until_complete( - asyncio.gather(*to_cancel, loop=loop, return_exceptions=True) - ) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - loop.call_exception_handler( - { - "message": "unhandled exception during asyncio.run() shutdown", - "exception": task.exception(), - "task": task, - } - ) - - -def run_app( - app: Union[Application, Awaitable[Application]], - *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Optional[str] = None, - sock: Optional[socket.socket] = None, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - print: Callable[..., None] = print, - backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, - handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, -) -> None: - """Run an app locally""" - loop = asyncio.get_event_loop() - - # Configure if and only if in debugging mode and using the default logger - if loop.get_debug() and access_log and access_log.name == "aiohttp.access": - if access_log.level == logging.NOTSET: - access_log.setLevel(logging.DEBUG) - if not access_log.hasHandlers(): - access_log.addHandler(logging.StreamHandler()) - - try: - main_task = loop.create_task( - _run_app( - app, - host=host, - port=port, - path=path, - sock=sock, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - print=print, - backlog=backlog, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - handle_signals=handle_signals, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - loop.run_until_complete(main_task) - except (GracefulExit, KeyboardInterrupt): # pragma: no cover - pass - finally: - _cancel_tasks({main_task}, loop) - _cancel_tasks(all_tasks(loop), loop) - if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() - - -def main(argv: List[str]) -> None: - arg_parser = ArgumentParser( - description="aiohttp.web Application server", prog="aiohttp.web" - ) - arg_parser.add_argument( - "entry_func", - help=( - "Callable returning the `aiohttp.web.Application` instance to " - "run. Should be specified in the 'module:function' syntax." 
- ), - metavar="entry-func", - ) - arg_parser.add_argument( - "-H", - "--hostname", - help="TCP/IP hostname to serve on (default: %(default)r)", - default="localhost", - ) - arg_parser.add_argument( - "-P", - "--port", - help="TCP/IP port to serve on (default: %(default)r)", - type=int, - default="8080", - ) - arg_parser.add_argument( - "-U", - "--path", - help="Unix file system path to serve on. Specifying a path will cause " - "hostname and port arguments to be ignored.", - ) - args, extra_argv = arg_parser.parse_known_args(argv) - - # Import logic - mod_str, _, func_str = args.entry_func.partition(":") - if not func_str or not mod_str: - arg_parser.error("'entry-func' not in 'module:function' syntax") - if mod_str.startswith("."): - arg_parser.error("relative module names not supported") - try: - module = import_module(mod_str) - except ImportError as ex: - arg_parser.error(f"unable to import {mod_str}: {ex}") - try: - func = getattr(module, func_str) - except AttributeError: - arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") - - # Compatibility logic - if args.path is not None and not hasattr(socket, "AF_UNIX"): - arg_parser.error( - "file system paths not supported by your operating" " environment" - ) - - logging.basicConfig(level=logging.DEBUG) - - app = func(extra_argv) - run_app(app, host=args.hostname, port=args.port, path=args.path) - arg_parser.exit(message="Stopped\n") - - -if __name__ == "__main__": # pragma: no branch - main(sys.argv[1:]) # pragma: no cover diff --git a/third_party/python/aiohttp/aiohttp/web_app.py b/third_party/python/aiohttp/aiohttp/web_app.py deleted file mode 100644 index 14f2937ae55a..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_app.py +++ /dev/null @@ -1,552 +0,0 @@ -import asyncio -import logging -import warnings -from functools import partial, update_wrapper -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Awaitable, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -from . 
import hdrs -from .abc import ( - AbstractAccessLogger, - AbstractMatchInfo, - AbstractRouter, - AbstractStreamWriter, -) -from .frozenlist import FrozenList -from .helpers import DEBUG -from .http_parser import RawRequestMessage -from .log import web_logger -from .signals import Signal -from .streams import StreamReader -from .web_log import AccessLogger -from .web_middlewares import _fix_request_current_app -from .web_protocol import RequestHandler -from .web_request import Request -from .web_response import StreamResponse -from .web_routedef import AbstractRouteDef -from .web_server import Server -from .web_urldispatcher import ( - AbstractResource, - AbstractRoute, - Domain, - MaskDomain, - MatchedSubAppResource, - PrefixedSubAppResource, - UrlDispatcher, -) - -__all__ = ("Application", "CleanupError") - - -if TYPE_CHECKING: # pragma: no cover - _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] - _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] - _Handler = Callable[[Request], Awaitable[StreamResponse]] - _Middleware = Union[ - Callable[[Request, _Handler], Awaitable[StreamResponse]], - Callable[["Application", _Handler], Awaitable[_Handler]], # old-style - ] - _Middlewares = FrozenList[_Middleware] - _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]] - _Subapps = List["Application"] -else: - # No type checker mode, skip types - _AppSignal = Signal - _RespPrepareSignal = Signal - _Handler = Callable - _Middleware = Callable - _Middlewares = FrozenList - _MiddlewaresHandlers = Optional[Sequence] - _Subapps = List - - -class Application(MutableMapping[str, Any]): - ATTRS = frozenset( - [ - "logger", - "_debug", - "_router", - "_loop", - "_handler_args", - "_middlewares", - "_middlewares_handlers", - "_run_middlewares", - "_state", - "_frozen", - "_pre_frozen", - "_subapps", - "_on_response_prepare", - "_on_startup", - "_on_shutdown", - "_on_cleanup", - "_client_max_size", - "_cleanup_ctx", - ] - ) - - def __init__( - self, - *, - logger: logging.Logger = web_logger, - router: Optional[UrlDispatcher] = None, - middlewares: Iterable[_Middleware] = (), - handler_args: Optional[Mapping[str, Any]] = None, - client_max_size: int = 1024 ** 2, - loop: Optional[asyncio.AbstractEventLoop] = None, - debug: Any = ..., # mypy doesn't support ellipsis - ) -> None: - if router is None: - router = UrlDispatcher() - else: - warnings.warn( - "router argument is deprecated", DeprecationWarning, stacklevel=2 - ) - assert isinstance(router, AbstractRouter), router - - if loop is not None: - warnings.warn( - "loop argument is deprecated", DeprecationWarning, stacklevel=2 - ) - - if debug is not ...: - warnings.warn( - "debug argument is deprecated", DeprecationWarning, stacklevel=2 - ) - self._debug = debug - self._router = router # type: UrlDispatcher - self._loop = loop - self._handler_args = handler_args - self.logger = logger - - self._middlewares = FrozenList(middlewares) # type: _Middlewares - - # initialized on freezing - self._middlewares_handlers = None # type: _MiddlewaresHandlers - # initialized on freezing - self._run_middlewares = None # type: Optional[bool] - - self._state = {} # type: Dict[str, Any] - self._frozen = False - self._pre_frozen = False - self._subapps = [] # type: _Subapps - - self._on_response_prepare = Signal(self) # type: _RespPrepareSignal - self._on_startup = Signal(self) # type: _AppSignal - self._on_shutdown = Signal(self) # type: _AppSignal - self._on_cleanup = Signal(self) # type: _AppSignal - 
self._cleanup_ctx = CleanupContext() - self._on_startup.append(self._cleanup_ctx._on_startup) - self._on_cleanup.append(self._cleanup_ctx._on_cleanup) - self._client_max_size = client_max_size - - def __init_subclass__(cls: Type["Application"]) -> None: - warnings.warn( - "Inheritance class {} from web.Application " - "is discouraged".format(cls.__name__), - DeprecationWarning, - stacklevel=2, - ) - - if DEBUG: # pragma: no cover - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom web.Application.{} attribute " - "is discouraged".format(name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - # MutableMapping API - - def __eq__(self, other: object) -> bool: - return self is other - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def _check_frozen(self) -> None: - if self._frozen: - warnings.warn( - "Changing state of started or joined " "application is deprecated", - DeprecationWarning, - stacklevel=3, - ) - - def __setitem__(self, key: str, value: Any) -> None: - self._check_frozen() - self._state[key] = value - - def __delitem__(self, key: str) -> None: - self._check_frozen() - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - ######## - @property - def loop(self) -> asyncio.AbstractEventLoop: - # Technically the loop can be None - # but we mask it by explicit type cast - # to provide more convinient type annotation - warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) - return cast(asyncio.AbstractEventLoop, self._loop) - - def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: - if loop is None: - loop = asyncio.get_event_loop() - if self._loop is not None and self._loop is not loop: - raise RuntimeError( - "web.Application instance initialized with different loop" - ) - - self._loop = loop - - # set loop debug - if self._debug is ...: - self._debug = loop.get_debug() - - # set loop to sub applications - for subapp in self._subapps: - subapp._set_loop(loop) - - @property - def pre_frozen(self) -> bool: - return self._pre_frozen - - def pre_freeze(self) -> None: - if self._pre_frozen: - return - - self._pre_frozen = True - self._middlewares.freeze() - self._router.freeze() - self._on_response_prepare.freeze() - self._cleanup_ctx.freeze() - self._on_startup.freeze() - self._on_shutdown.freeze() - self._on_cleanup.freeze() - self._middlewares_handlers = tuple(self._prepare_middleware()) - - # If current app and any subapp do not have middlewares avoid run all - # of the code footprint that it implies, which have a middleware - # hardcoded per app that sets up the current_app attribute. If no - # middlewares are configured the handler will receive the proper - # current_app without needing all of this code. 
- self._run_middlewares = True if self.middlewares else False - - for subapp in self._subapps: - subapp.pre_freeze() - self._run_middlewares = self._run_middlewares or subapp._run_middlewares - - @property - def frozen(self) -> bool: - return self._frozen - - def freeze(self) -> None: - if self._frozen: - return - - self.pre_freeze() - self._frozen = True - for subapp in self._subapps: - subapp.freeze() - - @property - def debug(self) -> bool: - warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) - return self._debug - - def _reg_subapp_signals(self, subapp: "Application") -> None: - def reg_handler(signame: str) -> None: - subsig = getattr(subapp, signame) - - async def handler(app: "Application") -> None: - await subsig.send(subapp) - - appsig = getattr(self, signame) - appsig.append(handler) - - reg_handler("on_startup") - reg_handler("on_shutdown") - reg_handler("on_cleanup") - - def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: - if not isinstance(prefix, str): - raise TypeError("Prefix must be str") - prefix = prefix.rstrip("/") - if not prefix: - raise ValueError("Prefix cannot be empty") - factory = partial(PrefixedSubAppResource, prefix, subapp) - return self._add_subapp(factory, subapp) - - def _add_subapp( - self, resource_factory: Callable[[], AbstractResource], subapp: "Application" - ) -> AbstractResource: - if self.frozen: - raise RuntimeError("Cannot add sub application to frozen application") - if subapp.frozen: - raise RuntimeError("Cannot add frozen application") - resource = resource_factory() - self.router.register_resource(resource) - self._reg_subapp_signals(subapp) - self._subapps.append(subapp) - subapp.pre_freeze() - if self._loop is not None: - subapp._set_loop(self._loop) - return resource - - def add_domain(self, domain: str, subapp: "Application") -> AbstractResource: - if not isinstance(domain, str): - raise TypeError("Domain must be str") - elif "*" in domain: - rule = MaskDomain(domain) # type: Domain - else: - rule = Domain(domain) - factory = partial(MatchedSubAppResource, rule, subapp) - return self._add_subapp(factory, subapp) - - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: - return self.router.add_routes(routes) - - @property - def on_response_prepare(self) -> _RespPrepareSignal: - return self._on_response_prepare - - @property - def on_startup(self) -> _AppSignal: - return self._on_startup - - @property - def on_shutdown(self) -> _AppSignal: - return self._on_shutdown - - @property - def on_cleanup(self) -> _AppSignal: - return self._on_cleanup - - @property - def cleanup_ctx(self) -> "CleanupContext": - return self._cleanup_ctx - - @property - def router(self) -> UrlDispatcher: - return self._router - - @property - def middlewares(self) -> _Middlewares: - return self._middlewares - - def _make_handler( - self, - *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - **kwargs: Any, - ) -> Server: - - if not issubclass(access_log_class, AbstractAccessLogger): - raise TypeError( - "access_log_class must be subclass of " - "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) - ) - - self._set_loop(loop) - self.freeze() - - kwargs["debug"] = self._debug - kwargs["access_log_class"] = access_log_class - if self._handler_args: - for k, v in self._handler_args.items(): - kwargs[k] = v - - return Server( - self._handle, # type: ignore - request_factory=self._make_request, - 
loop=self._loop, - **kwargs, - ) - - def make_handler( - self, - *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - **kwargs: Any, - ) -> Server: - - warnings.warn( - "Application.make_handler(...) is deprecated, " "use AppRunner API instead", - DeprecationWarning, - stacklevel=2, - ) - - return self._make_handler( - loop=loop, access_log_class=access_log_class, **kwargs - ) - - async def startup(self) -> None: - """Causes on_startup signal - - Should be called in the event loop along with the request handler. - """ - await self.on_startup.send(self) - - async def shutdown(self) -> None: - """Causes on_shutdown signal - - Should be called before cleanup() - """ - await self.on_shutdown.send(self) - - async def cleanup(self) -> None: - """Causes on_cleanup signal - - Should be called after shutdown() - """ - await self.on_cleanup.send(self) - - def _make_request( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: RequestHandler, - writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - _cls: Type[Request] = Request, - ) -> Request: - return _cls( - message, - payload, - protocol, - writer, - task, - self._loop, - client_max_size=self._client_max_size, - ) - - def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]: - for m in reversed(self._middlewares): - if getattr(m, "__middleware_version__", None) == 1: - yield m, True - else: - warnings.warn( - 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), - DeprecationWarning, - stacklevel=2, - ) - yield m, False - - yield _fix_request_current_app(self), True - - async def _handle(self, request: Request) -> StreamResponse: - loop = asyncio.get_event_loop() - debug = loop.get_debug() - match_info = await self._router.resolve(request) - if debug: # pragma: no cover - if not isinstance(match_info, AbstractMatchInfo): - raise TypeError( - "match_info should be AbstractMatchInfo " - "instance, not {!r}".format(match_info) - ) - match_info.add_app(self) - - match_info.freeze() - - resp = None - request._match_info = match_info # type: ignore - expect = request.headers.get(hdrs.EXPECT) - if expect: - resp = await match_info.expect_handler(request) - await request.writer.drain() - - if resp is None: - handler = match_info.handler - - if self._run_middlewares: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler - ) - else: - handler = await m(app, handler) # type: ignore - - resp = await handler(request) - - return resp - - def __call__(self) -> "Application": - """gunicorn compatibility""" - return self - - def __repr__(self) -> str: - return "".format(id(self)) - - def __bool__(self) -> bool: - return True - - -class CleanupError(RuntimeError): - @property - def exceptions(self) -> List[BaseException]: - return self.args[1] - - -if TYPE_CHECKING: # pragma: no cover - _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] -else: - _CleanupContextBase = FrozenList - - -class CleanupContext(_CleanupContextBase): - def __init__(self) -> None: - super().__init__() - self._exits = [] # type: List[AsyncIterator[None]] - - async def _on_startup(self, app: Application) -> None: - for cb in self: - it = cb(app).__aiter__() - await it.__anext__() - self._exits.append(it) - - async def _on_cleanup(self, app: Application) -> None: - errors = [] - for it in reversed(self._exits): - try: - 
await it.__anext__() - except StopAsyncIteration: - pass - except Exception as exc: - errors.append(exc) - else: - errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) - if errors: - if len(errors) == 1: - raise errors[0] - else: - raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/third_party/python/aiohttp/aiohttp/web_exceptions.py b/third_party/python/aiohttp/aiohttp/web_exceptions.py deleted file mode 100644 index 2eadca0386aa..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_exceptions.py +++ /dev/null @@ -1,441 +0,0 @@ -import warnings -from typing import Any, Dict, Iterable, List, Optional, Set # noqa - -from yarl import URL - -from .typedefs import LooseHeaders, StrOrURL -from .web_response import Response - -__all__ = ( - "HTTPException", - "HTTPError", - "HTTPRedirection", - "HTTPSuccessful", - "HTTPOk", - "HTTPCreated", - "HTTPAccepted", - "HTTPNonAuthoritativeInformation", - "HTTPNoContent", - "HTTPResetContent", - "HTTPPartialContent", - "HTTPMultipleChoices", - "HTTPMovedPermanently", - "HTTPFound", - "HTTPSeeOther", - "HTTPNotModified", - "HTTPUseProxy", - "HTTPTemporaryRedirect", - "HTTPPermanentRedirect", - "HTTPClientError", - "HTTPBadRequest", - "HTTPUnauthorized", - "HTTPPaymentRequired", - "HTTPForbidden", - "HTTPNotFound", - "HTTPMethodNotAllowed", - "HTTPNotAcceptable", - "HTTPProxyAuthenticationRequired", - "HTTPRequestTimeout", - "HTTPConflict", - "HTTPGone", - "HTTPLengthRequired", - "HTTPPreconditionFailed", - "HTTPRequestEntityTooLarge", - "HTTPRequestURITooLong", - "HTTPUnsupportedMediaType", - "HTTPRequestRangeNotSatisfiable", - "HTTPExpectationFailed", - "HTTPMisdirectedRequest", - "HTTPUnprocessableEntity", - "HTTPFailedDependency", - "HTTPUpgradeRequired", - "HTTPPreconditionRequired", - "HTTPTooManyRequests", - "HTTPRequestHeaderFieldsTooLarge", - "HTTPUnavailableForLegalReasons", - "HTTPServerError", - "HTTPInternalServerError", - "HTTPNotImplemented", - "HTTPBadGateway", - "HTTPServiceUnavailable", - "HTTPGatewayTimeout", - "HTTPVersionNotSupported", - "HTTPVariantAlsoNegotiates", - "HTTPInsufficientStorage", - "HTTPNotExtended", - "HTTPNetworkAuthenticationRequired", -) - - -############################################################ -# HTTP Exceptions -############################################################ - - -class HTTPException(Response, Exception): - - # You should set in subclasses: - # status = 200 - - status_code = -1 - empty_body = False - - __http_exception__ = True - - def __init__( - self, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - if body is not None: - warnings.warn( - "body argument is deprecated for http web exceptions", - DeprecationWarning, - ) - Response.__init__( - self, - status=self.status_code, - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - Exception.__init__(self, self.reason) - if self.body is None and not self.empty_body: - self.text = f"{self.status}: {self.reason}" - - def __bool__(self) -> bool: - return True - - -class HTTPError(HTTPException): - """Base class for exceptions with status codes in the 400s and 500s.""" - - -class HTTPRedirection(HTTPException): - """Base class for exceptions with status codes in the 300s.""" - - -class HTTPSuccessful(HTTPException): - """Base class for exceptions with status codes in the 200s.""" - - -class HTTPOk(HTTPSuccessful): - status_code = 200 - - 
-class HTTPCreated(HTTPSuccessful): - status_code = 201 - - -class HTTPAccepted(HTTPSuccessful): - status_code = 202 - - -class HTTPNonAuthoritativeInformation(HTTPSuccessful): - status_code = 203 - - -class HTTPNoContent(HTTPSuccessful): - status_code = 204 - empty_body = True - - -class HTTPResetContent(HTTPSuccessful): - status_code = 205 - empty_body = True - - -class HTTPPartialContent(HTTPSuccessful): - status_code = 206 - - -############################################################ -# 3xx redirection -############################################################ - - -class _HTTPMove(HTTPRedirection): - def __init__( - self, - location: StrOrURL, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - if not location: - raise ValueError("HTTP redirects need a location to redirect to.") - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Location"] = str(URL(location)) - self.location = location - - -class HTTPMultipleChoices(_HTTPMove): - status_code = 300 - - -class HTTPMovedPermanently(_HTTPMove): - status_code = 301 - - -class HTTPFound(_HTTPMove): - status_code = 302 - - -# This one is safe after a POST (the redirected location will be -# retrieved with GET): -class HTTPSeeOther(_HTTPMove): - status_code = 303 - - -class HTTPNotModified(HTTPRedirection): - # FIXME: this should include a date or etag header - status_code = 304 - empty_body = True - - -class HTTPUseProxy(_HTTPMove): - # Not a move, but looks a little like one - status_code = 305 - - -class HTTPTemporaryRedirect(_HTTPMove): - status_code = 307 - - -class HTTPPermanentRedirect(_HTTPMove): - status_code = 308 - - -############################################################ -# 4xx client error -############################################################ - - -class HTTPClientError(HTTPError): - pass - - -class HTTPBadRequest(HTTPClientError): - status_code = 400 - - -class HTTPUnauthorized(HTTPClientError): - status_code = 401 - - -class HTTPPaymentRequired(HTTPClientError): - status_code = 402 - - -class HTTPForbidden(HTTPClientError): - status_code = 403 - - -class HTTPNotFound(HTTPClientError): - status_code = 404 - - -class HTTPMethodNotAllowed(HTTPClientError): - status_code = 405 - - def __init__( - self, - method: str, - allowed_methods: Iterable[str], - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - allow = ",".join(sorted(allowed_methods)) - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Allow"] = allow - self.allowed_methods = set(allowed_methods) # type: Set[str] - self.method = method.upper() - - -class HTTPNotAcceptable(HTTPClientError): - status_code = 406 - - -class HTTPProxyAuthenticationRequired(HTTPClientError): - status_code = 407 - - -class HTTPRequestTimeout(HTTPClientError): - status_code = 408 - - -class HTTPConflict(HTTPClientError): - status_code = 409 - - -class HTTPGone(HTTPClientError): - status_code = 410 - - -class HTTPLengthRequired(HTTPClientError): - status_code = 411 - - -class HTTPPreconditionFailed(HTTPClientError): - status_code = 412 - - -class HTTPRequestEntityTooLarge(HTTPClientError): - status_code = 413 - - def __init__(self, max_size: float, actual_size: float, 
**kwargs: Any) -> None: - kwargs.setdefault( - "text", - "Maximum request body size {} exceeded, " - "actual body size {}".format(max_size, actual_size), - ) - super().__init__(**kwargs) - - -class HTTPRequestURITooLong(HTTPClientError): - status_code = 414 - - -class HTTPUnsupportedMediaType(HTTPClientError): - status_code = 415 - - -class HTTPRequestRangeNotSatisfiable(HTTPClientError): - status_code = 416 - - -class HTTPExpectationFailed(HTTPClientError): - status_code = 417 - - -class HTTPMisdirectedRequest(HTTPClientError): - status_code = 421 - - -class HTTPUnprocessableEntity(HTTPClientError): - status_code = 422 - - -class HTTPFailedDependency(HTTPClientError): - status_code = 424 - - -class HTTPUpgradeRequired(HTTPClientError): - status_code = 426 - - -class HTTPPreconditionRequired(HTTPClientError): - status_code = 428 - - -class HTTPTooManyRequests(HTTPClientError): - status_code = 429 - - -class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): - status_code = 431 - - -class HTTPUnavailableForLegalReasons(HTTPClientError): - status_code = 451 - - def __init__( - self, - link: str, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Link"] = '<%s>; rel="blocked-by"' % link - self.link = link - - -############################################################ -# 5xx Server Error -############################################################ -# Response status codes beginning with the digit "5" indicate cases in -# which the server is aware that it has erred or is incapable of -# performing the request. Except when responding to a HEAD request, the -# server SHOULD include an entity containing an explanation of the error -# situation, and whether it is a temporary or permanent condition. User -# agents SHOULD display any included entity to the user. These response -# codes are applicable to any request method. - - -class HTTPServerError(HTTPError): - pass - - -class HTTPInternalServerError(HTTPServerError): - status_code = 500 - - -class HTTPNotImplemented(HTTPServerError): - status_code = 501 - - -class HTTPBadGateway(HTTPServerError): - status_code = 502 - - -class HTTPServiceUnavailable(HTTPServerError): - status_code = 503 - - -class HTTPGatewayTimeout(HTTPServerError): - status_code = 504 - - -class HTTPVersionNotSupported(HTTPServerError): - status_code = 505 - - -class HTTPVariantAlsoNegotiates(HTTPServerError): - status_code = 506 - - -class HTTPInsufficientStorage(HTTPServerError): - status_code = 507 - - -class HTTPNotExtended(HTTPServerError): - status_code = 510 - - -class HTTPNetworkAuthenticationRequired(HTTPServerError): - status_code = 511 diff --git a/third_party/python/aiohttp/aiohttp/web_fileresponse.py b/third_party/python/aiohttp/aiohttp/web_fileresponse.py deleted file mode 100644 index 0737c4f42d7c..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_fileresponse.py +++ /dev/null @@ -1,243 +0,0 @@ -import asyncio -import mimetypes -import os -import pathlib -import sys -from typing import ( # noqa - IO, - TYPE_CHECKING, - Any, - Awaitable, - Callable, - List, - Optional, - Union, - cast, -) - -from . 
import hdrs -from .abc import AbstractStreamWriter -from .typedefs import LooseHeaders -from .web_exceptions import ( - HTTPNotModified, - HTTPPartialContent, - HTTPPreconditionFailed, - HTTPRequestRangeNotSatisfiable, -) -from .web_response import StreamResponse - -__all__ = ("FileResponse",) - -if TYPE_CHECKING: # pragma: no cover - from .web_request import BaseRequest - - -_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] - - -NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE")) - - -class FileResponse(StreamResponse): - """A response object can be used to send files.""" - - def __init__( - self, - path: Union[str, pathlib.Path], - chunk_size: int = 256 * 1024, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - ) -> None: - super().__init__(status=status, reason=reason, headers=headers) - - if isinstance(path, str): - path = pathlib.Path(path) - - self._path = path - self._chunk_size = chunk_size - - async def _sendfile_fallback( - self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int - ) -> AbstractStreamWriter: - # To keep memory usage low,fobj is transferred in chunks - # controlled by the constructor's chunk_size argument. - - chunk_size = self._chunk_size - loop = asyncio.get_event_loop() - - await loop.run_in_executor(None, fobj.seek, offset) - - chunk = await loop.run_in_executor(None, fobj.read, chunk_size) - while chunk: - await writer.write(chunk) - count = count - chunk_size - if count <= 0: - break - chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) - - await writer.drain() - return writer - - async def _sendfile( - self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int - ) -> AbstractStreamWriter: - writer = await super().prepare(request) - assert writer is not None - - if NOSENDFILE or sys.version_info < (3, 7) or self.compression: - return await self._sendfile_fallback(writer, fobj, offset, count) - - loop = request._loop - transport = request.transport - assert transport is not None - - try: - await loop.sendfile(transport, fobj, offset, count) - except NotImplementedError: - return await self._sendfile_fallback(writer, fobj, offset, count) - - await super().write_eof() - return writer - - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - filepath = self._path - - gzip = False - if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""): - gzip_path = filepath.with_name(filepath.name + ".gz") - - if gzip_path.is_file(): - filepath = gzip_path - gzip = True - - loop = asyncio.get_event_loop() - st = await loop.run_in_executor(None, filepath.stat) - - modsince = request.if_modified_since - if modsince is not None and st.st_mtime <= modsince.timestamp(): - self.set_status(HTTPNotModified.status_code) - self._length_check = False - # Delete any Content-Length headers provided by user. 
HTTP 304 - # should always have empty response body - return await super().prepare(request) - - unmodsince = request.if_unmodified_since - if unmodsince is not None and st.st_mtime > unmodsince.timestamp(): - self.set_status(HTTPPreconditionFailed.status_code) - return await super().prepare(request) - - if hdrs.CONTENT_TYPE not in self.headers: - ct, encoding = mimetypes.guess_type(str(filepath)) - if not ct: - ct = "application/octet-stream" - should_set_ct = True - else: - encoding = "gzip" if gzip else None - should_set_ct = False - - status = self._status - file_size = st.st_size - count = file_size - - start = None - - ifrange = request.if_range - if ifrange is None or st.st_mtime <= ifrange.timestamp(): - # If-Range header check: - # condition = cached date >= last modification date - # return 206 if True else 200. - # if False: - # Range header would not be processed, return 200 - # if True but Range header missing - # return 200 - try: - rng = request.http_range - start = rng.start - end = rng.stop - except ValueError: - # https://tools.ietf.org/html/rfc7233: - # A server generating a 416 (Range Not Satisfiable) response to - # a byte-range request SHOULD send a Content-Range header field - # with an unsatisfied-range value. - # The complete-length in a 416 response indicates the current - # length of the selected representation. - # - # Will do the same below. Many servers ignore this and do not - # send a Content-Range header with HTTP 416 - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" - self.set_status(HTTPRequestRangeNotSatisfiable.status_code) - return await super().prepare(request) - - # If a range request has been made, convert start, end slice - # notation into file pointer offset and count - if start is not None or end is not None: - if start < 0 and end is None: # return tail of file - start += file_size - if start < 0: - # if Range:bytes=-1000 in request header but file size - # is only 200, there would be trouble without this - start = 0 - count = file_size - start - else: - # rfc7233:If the last-byte-pos value is - # absent, or if the value is greater than or equal to - # the current length of the representation data, - # the byte range is interpreted as the remainder - # of the representation (i.e., the server replaces the - # value of last-byte-pos with a value that is one less than - # the current length of the selected representation). - count = ( - min(end if end is not None else file_size, file_size) - start - ) - - if start >= file_size: - # HTTP 416 should be returned in this case. - # - # According to https://tools.ietf.org/html/rfc7233: - # If a valid byte-range-set includes at least one - # byte-range-spec with a first-byte-pos that is less than - # the current length of the representation, or at least one - # suffix-byte-range-spec with a non-zero suffix-length, - # then the byte-range-set is satisfiable. Otherwise, the - # byte-range-set is unsatisfiable. - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" - self.set_status(HTTPRequestRangeNotSatisfiable.status_code) - return await super().prepare(request) - - status = HTTPPartialContent.status_code - # Even though you are sending the whole file, you should still - # return a HTTP 206 for a Range request. 
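The Range branch above reduces to turning the parsed (start, end) slice into a file offset and byte count, answering 206 when the range is satisfiable and 416 when it is not. A minimal standalone sketch of that arithmetic follows; the helper name and the (status, offset, count) return convention are illustrative, not aiohttp API.

from typing import Optional, Tuple

def range_to_offset_count(
    start: Optional[int], end: Optional[int], file_size: int
) -> Tuple[int, int, int]:
    """Map a parsed Range slice to (status, offset, count)."""
    if start is None and end is None:
        return 200, 0, file_size              # no Range header: send the whole file
    if start is not None and start < 0 and end is None:
        start = max(file_size + start, 0)     # suffix range, e.g. bytes=-1000
        return 206, start, file_size - start
    start = start or 0
    if start >= file_size:
        return 416, 0, 0                      # unsatisfiable; reply with Content-Range: bytes */<size>
    stop = min(end if end is not None else file_size, file_size)
    return 206, start, stop - start

assert range_to_offset_count(None, None, 100) == (200, 0, 100)
assert range_to_offset_count(0, 10, 100) == (206, 0, 10)       # Range: bytes=0-9
assert range_to_offset_count(-30, None, 100) == (206, 70, 30)  # Range: bytes=-30
assert range_to_offset_count(150, None, 100) == (416, 0, 0)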
- self.set_status(status) - - if should_set_ct: - self.content_type = ct # type: ignore - if encoding: - self.headers[hdrs.CONTENT_ENCODING] = encoding - if gzip: - self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING - self.last_modified = st.st_mtime # type: ignore - self.content_length = count - - self.headers[hdrs.ACCEPT_RANGES] = "bytes" - - real_start = cast(int, start) - - if status == HTTPPartialContent.status_code: - self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( - real_start, real_start + count - 1, file_size - ) - - if request.method == hdrs.METH_HEAD or self.status in [204, 304]: - return await super().prepare(request) - - fobj = await loop.run_in_executor(None, filepath.open, "rb") - if start: # be aware that start could be None or int=0 here. - offset = start - else: - offset = 0 - - try: - return await self._sendfile(request, fobj, offset, count) - finally: - await loop.run_in_executor(None, fobj.close) diff --git a/third_party/python/aiohttp/aiohttp/web_log.py b/third_party/python/aiohttp/aiohttp/web_log.py deleted file mode 100644 index 4cfa57929a97..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_log.py +++ /dev/null @@ -1,208 +0,0 @@ -import datetime -import functools -import logging -import os -import re -from collections import namedtuple -from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa - -from .abc import AbstractAccessLogger -from .web_request import BaseRequest -from .web_response import StreamResponse - -KeyMethod = namedtuple("KeyMethod", "key method") - - -class AccessLogger(AbstractAccessLogger): - """Helper object to log access. - - Usage: - log = logging.getLogger("spam") - log_format = "%a %{User-Agent}i" - access_logger = AccessLogger(log, log_format) - access_logger.log(request, response, time) - - Format: - %% The percent sign - %a Remote IP-address (IP-address of proxy if using reverse proxy) - %t Time when the request was started to process - %P The process ID of the child that serviced the request - %r First line of request - %s Response status code - %b Size of response in bytes, including HTTP headers - %T Time taken to serve the request, in seconds - %Tf Time taken to serve the request, in seconds with floating fraction - in .06f format - %D Time taken to serve the request, in microseconds - %{FOO}i request.headers['FOO'] - %{FOO}o response.headers['FOO'] - %{FOO}e os.environ['FOO'] - - """ - - LOG_FORMAT_MAP = { - "a": "remote_address", - "t": "request_start_time", - "P": "process_id", - "r": "first_request_line", - "s": "response_status", - "b": "response_size", - "T": "request_time", - "Tf": "request_time_frac", - "D": "request_time_micro", - "i": "request_header", - "o": "response_header", - } - - LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' - FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") - CLEANUP_RE = re.compile(r"(%[^s])") - _FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]] - - def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: - """Initialise the logger. - - logger is a logger object to be used for logging. - log_format is a string with apache compatible log format description. 
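The format atoms documented above are compiled once into an ordinary %-style template, as compile_format() below describes. A stdlib-only illustration of that translation step, reusing the FORMAT_RE pattern shown above (the surrounding variable names exist only for this demo):

import re

FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")

log_format = '%a %t "%r" %s %b "%{Referer}i"'
atoms = FORMAT_RE.findall(log_format)
template = FORMAT_RE.sub("%s", log_format)

print(template)  # %s %s "%s" %s %s "%s"
print([a[0] if a[1] == "" else (a[2], a[1]) for a in atoms])
# ['a', 't', 'r', 's', 'b', ('i', 'Referer')]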
- - """ - super().__init__(logger, log_format=log_format) - - _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) - if not _compiled_format: - _compiled_format = self.compile_format(log_format) - AccessLogger._FORMAT_CACHE[log_format] = _compiled_format - - self._log_format, self._methods = _compiled_format - - def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: - """Translate log_format into form usable by modulo formatting - - All known atoms will be replaced with %s - Also methods for formatting of those atoms will be added to - _methods in appropriate order - - For example we have log_format = "%a %t" - This format will be translated to "%s %s" - Also contents of _methods will be - [self._format_a, self._format_t] - These method will be called and results will be passed - to translated string format. - - Each _format_* method receive 'args' which is list of arguments - given to self.log - - Exceptions are _format_e, _format_i and _format_o methods which - also receive key name (by functools.partial) - - """ - # list of (key, method) tuples, we don't use an OrderedDict as users - # can repeat the same key more than once - methods = list() - - for atom in self.FORMAT_RE.findall(log_format): - if atom[1] == "": - format_key1 = self.LOG_FORMAT_MAP[atom[0]] - m = getattr(AccessLogger, "_format_%s" % atom[0]) - key_method = KeyMethod(format_key1, m) - else: - format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) - m = getattr(AccessLogger, "_format_%s" % atom[2]) - key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) - - methods.append(key_method) - - log_format = self.FORMAT_RE.sub(r"%s", log_format) - log_format = self.CLEANUP_RE.sub(r"%\1", log_format) - return log_format, methods - - @staticmethod - def _format_i( - key: str, request: BaseRequest, response: StreamResponse, time: float - ) -> str: - if request is None: - return "(no headers)" - - # suboptimal, make istr(key) once - return request.headers.get(key, "-") - - @staticmethod - def _format_o( - key: str, request: BaseRequest, response: StreamResponse, time: float - ) -> str: - # suboptimal, make istr(key) once - return response.headers.get(key, "-") - - @staticmethod - def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: - if request is None: - return "-" - ip = request.remote - return ip if ip is not None else "-" - - @staticmethod - def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: - now = datetime.datetime.utcnow() - start_time = now - datetime.timedelta(seconds=time) - return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]") - - @staticmethod - def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: - return "<%s>" % os.getpid() - - @staticmethod - def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: - if request is None: - return "-" - return "{} {} HTTP/{}.{}".format( - request.method, - request.path_qs, - request.version.major, - request.version.minor, - ) - - @staticmethod - def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: - return response.status - - @staticmethod - def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: - return response.body_length - - @staticmethod - def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: - return str(round(time)) - - @staticmethod - def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: - return "%06f" % 
time - - @staticmethod - def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: - return str(round(time * 1000000)) - - def _format_line( - self, request: BaseRequest, response: StreamResponse, time: float - ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: - return [(key, method(request, response, time)) for key, method in self._methods] - - def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - try: - fmt_info = self._format_line(request, response, time) - - values = list() - extra = dict() - for key, value in fmt_info: - values.append(value) - - if key.__class__ is str: - extra[key] = value - else: - k1, k2 = key # type: ignore - dct = extra.get(k1, {}) # type: ignore - dct[k2] = value # type: ignore - extra[k1] = dct # type: ignore - - self.logger.info(self._log_format % tuple(values), extra=extra) - except Exception: - self.logger.exception("Error in logging") diff --git a/third_party/python/aiohttp/aiohttp/web_middlewares.py b/third_party/python/aiohttp/aiohttp/web_middlewares.py deleted file mode 100644 index 8a8967e81312..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_middlewares.py +++ /dev/null @@ -1,121 +0,0 @@ -import re -from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar - -from .web_exceptions import HTTPPermanentRedirect, _HTTPMove -from .web_request import Request -from .web_response import StreamResponse -from .web_urldispatcher import SystemRoute - -__all__ = ( - "middleware", - "normalize_path_middleware", -) - -if TYPE_CHECKING: # pragma: no cover - from .web_app import Application - -_Func = TypeVar("_Func") - - -async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: - alt_request = request.clone(rel_url=path) - - match_info = await request.app.router.resolve(alt_request) - alt_request._match_info = match_info # type: ignore - - if match_info.http_exception is None: - return True, alt_request - - return False, request - - -def middleware(f: _Func) -> _Func: - f.__middleware_version__ = 1 # type: ignore - return f - - -_Handler = Callable[[Request], Awaitable[StreamResponse]] -_Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]] - - -def normalize_path_middleware( - *, - append_slash: bool = True, - remove_slash: bool = False, - merge_slashes: bool = True, - redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect -) -> _Middleware: - """ - Middleware factory which produces a middleware that normalizes - the path of a request. By normalizing it means: - - - Add or remove a trailing slash to the path. - - Double slashes are replaced by one. - - The middleware returns as soon as it finds a path that resolves - correctly. The order if both merge and append/remove are enabled is - 1) merge slashes - 2) append/remove slash - 3) both merge slashes and append/remove slash. - If the path resolves with at least one of those conditions, it will - redirect to the new path. - - Only one of `append_slash` and `remove_slash` can be enabled. If both - are `True` the factory will raise an assertion error - - If `append_slash` is `True` the middleware will append a slash when - needed. If a resource is defined with trailing slash and the request - comes without it, it will append it automatically. - - If `remove_slash` is `True`, `append_slash` must be `False`. 
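The candidate ordering described above (merge slashes first, then append or remove the trailing slash, then both combined) can be exercised in isolation. A standalone sketch of that ordering; the function is illustrative and is not the middleware itself:

import re

def candidate_paths(path, *, append_slash=True, remove_slash=False, merge_slashes=True):
    assert not (append_slash and remove_slash), "Cannot both remove and append slash"
    paths = []
    if merge_slashes:
        paths.append(re.sub("//+", "/", path))
    if append_slash and not path.endswith("/"):
        paths.append(path + "/")
    if remove_slash and path.endswith("/"):
        paths.append(path[:-1])
    if merge_slashes and append_slash:
        paths.append(re.sub("//+", "/", path + "/"))
    if merge_slashes and remove_slash:
        paths.append(re.sub("//+", "/", path)[:-1])
    return paths

print(candidate_paths("//api//v1"))  # ['/api/v1', '//api//v1/', '/api/v1/']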
When enabled - the middleware will remove trailing slashes and redirect if the resource - is defined - - If merge_slashes is True, merge multiple consecutive slashes in the - path into one. - """ - - correct_configuration = not (append_slash and remove_slash) - assert correct_configuration, "Cannot both remove and append slash" - - @middleware - async def impl(request: Request, handler: _Handler) -> StreamResponse: - if isinstance(request.match_info.route, SystemRoute): - paths_to_check = [] - if "?" in request.raw_path: - path, query = request.raw_path.split("?", 1) - query = "?" + query - else: - query = "" - path = request.raw_path - - if merge_slashes: - paths_to_check.append(re.sub("//+", "/", path)) - if append_slash and not request.path.endswith("/"): - paths_to_check.append(path + "/") - if remove_slash and request.path.endswith("/"): - paths_to_check.append(path[:-1]) - if merge_slashes and append_slash: - paths_to_check.append(re.sub("//+", "/", path + "/")) - if merge_slashes and remove_slash: - merged_slashes = re.sub("//+", "/", path) - paths_to_check.append(merged_slashes[:-1]) - - for path in paths_to_check: - path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg - resolves, request = await _check_request_resolves(request, path) - if resolves: - raise redirect_class(request.raw_path + query) - - return await handler(request) - - return impl - - -def _fix_request_current_app(app: "Application") -> _Middleware: - @middleware - async def impl(request: Request, handler: _Handler) -> StreamResponse: - with request.match_info.set_current_app(app): - return await handler(request) - - return impl diff --git a/third_party/python/aiohttp/aiohttp/web_protocol.py b/third_party/python/aiohttp/aiohttp/web_protocol.py deleted file mode 100644 index 8e02bc4aab7c..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_protocol.py +++ /dev/null @@ -1,667 +0,0 @@ -import asyncio -import asyncio.streams -import traceback -import warnings -from collections import deque -from contextlib import suppress -from html import escape as html_escape -from http import HTTPStatus -from logging import Logger -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple, Type, cast - -import yarl - -from .abc import AbstractAccessLogger, AbstractStreamWriter -from .base_protocol import BaseProtocol -from .helpers import CeilTimeout, current_task -from .http import ( - HttpProcessingError, - HttpRequestParser, - HttpVersion10, - RawRequestMessage, - StreamWriter, -) -from .log import access_logger, server_logger -from .streams import EMPTY_PAYLOAD, StreamReader -from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException -from .web_log import AccessLogger -from .web_request import BaseRequest -from .web_response import Response, StreamResponse - -__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") - -if TYPE_CHECKING: # pragma: no cover - from .web_server import Server - - -_RequestFactory = Callable[ - [ - RawRequestMessage, - StreamReader, - "RequestHandler", - AbstractStreamWriter, - "asyncio.Task[None]", - ], - BaseRequest, -] - -_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] - - -ERROR = RawRequestMessage( - "UNKNOWN", "/", HttpVersion10, {}, {}, True, False, False, False, yarl.URL("/") -) - - -class RequestPayloadError(Exception): - """Payload parsing error.""" - - -class PayloadAccessError(Exception): - """Payload was accessed after response was sent.""" - - -class RequestHandler(BaseProtocol): - """HTTP 
protocol implementation. - - RequestHandler handles incoming HTTP request. It reads request line, - request headers and request payload and calls handle_request() method. - By default it always returns with 404 response. - - RequestHandler handles errors in incoming request, like bad - status line, bad headers or incomplete payload. If any error occurs, - connection gets closed. - - :param keepalive_timeout: number of seconds before closing - keep-alive connection - :type keepalive_timeout: int or None - - :param bool tcp_keepalive: TCP keep-alive is on, default is on - - :param bool debug: enable debug mode - - :param logger: custom logger object - :type logger: aiohttp.log.server_logger - - :param access_log_class: custom class for access_logger - :type access_log_class: aiohttp.abc.AbstractAccessLogger - - :param access_log: custom logging object - :type access_log: aiohttp.log.server_logger - - :param str access_log_format: access log format string - - :param loop: Optional event loop - - :param int max_line_size: Optional maximum header line size - - :param int max_field_size: Optional maximum header field size - - :param int max_headers: Optional maximum header size - - """ - - KEEPALIVE_RESCHEDULE_DELAY = 1 - - __slots__ = ( - "_request_count", - "_keepalive", - "_manager", - "_request_handler", - "_request_factory", - "_tcp_keepalive", - "_keepalive_time", - "_keepalive_handle", - "_keepalive_timeout", - "_lingering_time", - "_messages", - "_message_tail", - "_waiter", - "_error_handler", - "_task_handler", - "_upgrade", - "_payload_parser", - "_request_parser", - "_reading_paused", - "logger", - "debug", - "access_log", - "access_logger", - "_close", - "_force_close", - "_current_request", - ) - - def __init__( - self, - manager: "Server", - *, - loop: asyncio.AbstractEventLoop, - keepalive_timeout: float = 75.0, # NGINX default is 75 secs - tcp_keepalive: bool = True, - logger: Logger = server_logger, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log: Logger = access_logger, - access_log_format: str = AccessLogger.LOG_FORMAT, - debug: bool = False, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - lingering_time: float = 10.0, - read_bufsize: int = 2 ** 16, - ): - - super().__init__(loop) - - self._request_count = 0 - self._keepalive = False - self._current_request = None # type: Optional[BaseRequest] - self._manager = manager # type: Optional[Server] - self._request_handler = ( - manager.request_handler - ) # type: Optional[_RequestHandler] - self._request_factory = ( - manager.request_factory - ) # type: Optional[_RequestFactory] - - self._tcp_keepalive = tcp_keepalive - # placeholder to be replaced on keepalive timeout setup - self._keepalive_time = 0.0 - self._keepalive_handle = None # type: Optional[asyncio.Handle] - self._keepalive_timeout = keepalive_timeout - self._lingering_time = float(lingering_time) - - self._messages = deque() # type: Any # Python 3.5 has no typing.Deque - self._message_tail = b"" - - self._waiter = None # type: Optional[asyncio.Future[None]] - self._error_handler = None # type: Optional[asyncio.Task[None]] - self._task_handler = None # type: Optional[asyncio.Task[None]] - - self._upgrade = False - self._payload_parser = None # type: Any - self._request_parser = HttpRequestParser( - self, - loop, - read_bufsize, - max_line_size=max_line_size, - max_field_size=max_field_size, - max_headers=max_headers, - payload_exception=RequestPayloadError, - ) # type: Optional[HttpRequestParser] - - 
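For context on how this protocol class is reached in practice: the low-level entry point is web.Server, which plays the role of the "manager" argument above and produces RequestHandler instances for asyncio. A rough usage sketch (the host, port, and handler body are illustrative assumptions):

import asyncio
from aiohttp import web

async def handler(request: web.BaseRequest) -> web.Response:
    return web.Response(text="OK")

async def main() -> None:
    server = web.Server(handler)          # acts as the RequestHandler "manager"
    runner = web.ServerRunner(server)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()
    await asyncio.sleep(3600)             # keep serving for a while

# asyncio.run(main())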
self.logger = logger - self.debug = debug - self.access_log = access_log - if access_log: - self.access_logger = access_log_class( - access_log, access_log_format - ) # type: Optional[AbstractAccessLogger] - else: - self.access_logger = None - - self._close = False - self._force_close = False - - def __repr__(self) -> str: - return "<{} {}>".format( - self.__class__.__name__, - "connected" if self.transport is not None else "disconnected", - ) - - @property - def keepalive_timeout(self) -> float: - return self._keepalive_timeout - - async def shutdown(self, timeout: Optional[float] = 15.0) -> None: - """Worker process is about to exit, we need cleanup everything and - stop accepting requests. It is especially important for keep-alive - connections.""" - self._force_close = True - - if self._keepalive_handle is not None: - self._keepalive_handle.cancel() - - if self._waiter: - self._waiter.cancel() - - # wait for handlers - with suppress(asyncio.CancelledError, asyncio.TimeoutError): - with CeilTimeout(timeout, loop=self._loop): - if self._error_handler is not None and not self._error_handler.done(): - await self._error_handler - - if self._current_request is not None: - self._current_request._cancel(asyncio.CancelledError()) - - if self._task_handler is not None and not self._task_handler.done(): - await self._task_handler - - # force-close non-idle handler - if self._task_handler is not None: - self._task_handler.cancel() - - if self.transport is not None: - self.transport.close() - self.transport = None - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - super().connection_made(transport) - - real_transport = cast(asyncio.Transport, transport) - if self._tcp_keepalive: - tcp_keepalive(real_transport) - - self._task_handler = self._loop.create_task(self.start()) - assert self._manager is not None - self._manager.connection_made(self, real_transport) - - def connection_lost(self, exc: Optional[BaseException]) -> None: - if self._manager is None: - return - self._manager.connection_lost(self, exc) - - super().connection_lost(exc) - - self._manager = None - self._force_close = True - self._request_factory = None - self._request_handler = None - self._request_parser = None - - if self._keepalive_handle is not None: - self._keepalive_handle.cancel() - - if self._current_request is not None: - if exc is None: - exc = ConnectionResetError("Connection lost") - self._current_request._cancel(exc) - - if self._error_handler is not None: - self._error_handler.cancel() - if self._task_handler is not None: - self._task_handler.cancel() - if self._waiter is not None: - self._waiter.cancel() - - self._task_handler = None - - if self._payload_parser is not None: - self._payload_parser.feed_eof() - self._payload_parser = None - - def set_parser(self, parser: Any) -> None: - # Actual type is WebReader - assert self._payload_parser is None - - self._payload_parser = parser - - if self._message_tail: - self._payload_parser.feed_data(self._message_tail) - self._message_tail = b"" - - def eof_received(self) -> None: - pass - - def data_received(self, data: bytes) -> None: - if self._force_close or self._close: - return - # parse http messages - if self._payload_parser is None and not self._upgrade: - assert self._request_parser is not None - try: - messages, upgraded, tail = self._request_parser.feed_data(data) - except HttpProcessingError as exc: - # something happened during parsing - self._error_handler = self._loop.create_task( - self.handle_parse_error( - StreamWriter(self, 
self._loop), 400, exc, exc.message - ) - ) - self.close() - except Exception as exc: - # 500: internal error - self._error_handler = self._loop.create_task( - self.handle_parse_error(StreamWriter(self, self._loop), 500, exc) - ) - self.close() - else: - if messages: - # sometimes the parser returns no messages - for (msg, payload) in messages: - self._request_count += 1 - self._messages.append((msg, payload)) - - waiter = self._waiter - if waiter is not None: - if not waiter.done(): - # don't set result twice - waiter.set_result(None) - - self._upgrade = upgraded - if upgraded and tail: - self._message_tail = tail - - # no parser, just store - elif self._payload_parser is None and self._upgrade and data: - self._message_tail += data - - # feed payload - elif data: - eof, tail = self._payload_parser.feed_data(data) - if eof: - self.close() - - def keep_alive(self, val: bool) -> None: - """Set keep-alive connection mode. - - :param bool val: new state. - """ - self._keepalive = val - if self._keepalive_handle: - self._keepalive_handle.cancel() - self._keepalive_handle = None - - def close(self) -> None: - """Stop accepting new pipelinig messages and close - connection when handlers done processing messages""" - self._close = True - if self._waiter: - self._waiter.cancel() - - def force_close(self) -> None: - """Force close connection""" - self._force_close = True - if self._waiter: - self._waiter.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None - - def log_access( - self, request: BaseRequest, response: StreamResponse, time: float - ) -> None: - if self.access_logger is not None: - self.access_logger.log(request, response, self._loop.time() - time) - - def log_debug(self, *args: Any, **kw: Any) -> None: - if self.debug: - self.logger.debug(*args, **kw) - - def log_exception(self, *args: Any, **kw: Any) -> None: - self.logger.exception(*args, **kw) - - def _process_keepalive(self) -> None: - if self._force_close or not self._keepalive: - return - - next = self._keepalive_time + self._keepalive_timeout - - # handler in idle state - if self._waiter: - if self._loop.time() > next: - self.force_close() - return - - # not all request handlers are done, - # reschedule itself to next second - self._keepalive_handle = self._loop.call_later( - self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive - ) - - async def _handle_request( - self, - request: BaseRequest, - start_time: float, - ) -> Tuple[StreamResponse, bool]: - assert self._request_handler is not None - try: - try: - self._current_request = request - resp = await self._request_handler(request) - finally: - self._current_request = None - except HTTPException as exc: - resp = Response( - status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers - ) - reset = await self.finish_response(request, resp, start_time) - except asyncio.CancelledError: - raise - except asyncio.TimeoutError as exc: - self.log_debug("Request handler timed out.", exc_info=exc) - resp = self.handle_error(request, 504) - reset = await self.finish_response(request, resp, start_time) - except Exception as exc: - resp = self.handle_error(request, 500, exc) - reset = await self.finish_response(request, resp, start_time) - else: - reset = await self.finish_response(request, resp, start_time) - - return resp, reset - - async def start(self) -> None: - """Process incoming request. - - It reads request line, request headers and request payload, then - calls handle_request() method. 
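The _process_keepalive logic above amounts to a deadline check: once the handler goes idle, the connection is closed when the last-activity time plus the keep-alive timeout has passed; otherwise the check reschedules itself. A standalone sketch of just that predicate (name and signature are illustrative):

import time
from typing import Optional

def keepalive_expired(keepalive_time: float, keepalive_timeout: float,
                      idle: bool, now: Optional[float] = None) -> bool:
    now = time.monotonic() if now is None else now
    return idle and now > (keepalive_time + keepalive_timeout)

assert keepalive_expired(100.0, 75.0, idle=True, now=180.0)       # past the deadline: close
assert not keepalive_expired(100.0, 75.0, idle=True, now=150.0)   # still inside the window
assert not keepalive_expired(100.0, 75.0, idle=False, now=500.0)  # a request is still in flight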
Subclass has to override - handle_request(). start() handles various exceptions in request - or response handling. Connection is being closed always unless - keep_alive(True) specified. - """ - loop = self._loop - handler = self._task_handler - assert handler is not None - manager = self._manager - assert manager is not None - keepalive_timeout = self._keepalive_timeout - resp = None - assert self._request_factory is not None - assert self._request_handler is not None - - while not self._force_close: - if not self._messages: - try: - # wait for next request - self._waiter = loop.create_future() - await self._waiter - except asyncio.CancelledError: - break - finally: - self._waiter = None - - message, payload = self._messages.popleft() - - start = loop.time() - - manager.requests_count += 1 - writer = StreamWriter(self, loop) - request = self._request_factory(message, payload, self, writer, handler) - try: - # a new task is used for copy context vars (#3406) - task = self._loop.create_task(self._handle_request(request, start)) - try: - resp, reset = await task - except (asyncio.CancelledError, ConnectionError): - self.log_debug("Ignored premature client disconnection") - break - # Deprecation warning (See #2415) - if getattr(resp, "__http_exception__", False): - warnings.warn( - "returning HTTPException object is deprecated " - "(#2415) and will be removed, " - "please raise the exception instead", - DeprecationWarning, - ) - - # Drop the processed task from asyncio.Task.all_tasks() early - del task - if reset: - self.log_debug("Ignored premature client disconnection 2") - break - - # notify server about keep-alive - self._keepalive = bool(resp.keep_alive) - - # check payload - if not payload.is_eof(): - lingering_time = self._lingering_time - if not self._force_close and lingering_time: - self.log_debug( - "Start lingering close timer for %s sec.", lingering_time - ) - - now = loop.time() - end_t = now + lingering_time - - with suppress(asyncio.TimeoutError, asyncio.CancelledError): - while not payload.is_eof() and now < end_t: - with CeilTimeout(end_t - now, loop=loop): - # read and ignore - await payload.readany() - now = loop.time() - - # if payload still uncompleted - if not payload.is_eof() and not self._force_close: - self.log_debug("Uncompleted request.") - self.close() - - payload.set_exception(PayloadAccessError()) - - except asyncio.CancelledError: - self.log_debug("Ignored premature client disconnection ") - break - except RuntimeError as exc: - if self.debug: - self.log_exception("Unhandled runtime exception", exc_info=exc) - self.force_close() - except Exception as exc: - self.log_exception("Unhandled exception", exc_info=exc) - self.force_close() - finally: - if self.transport is None and resp is not None: - self.log_debug("Ignored premature client disconnection.") - elif not self._force_close: - if self._keepalive and not self._close: - # start keep-alive timer - if keepalive_timeout is not None: - now = self._loop.time() - self._keepalive_time = now - if self._keepalive_handle is None: - self._keepalive_handle = loop.call_at( - now + keepalive_timeout, self._process_keepalive - ) - else: - break - - # remove handler, close transport if no handlers left - if not self._force_close: - self._task_handler = None - if self.transport is not None and self._error_handler is None: - self.transport.close() - - async def finish_response( - self, request: BaseRequest, resp: StreamResponse, start_time: float - ) -> bool: - """ - Prepare the response and write_eof, then log access. 
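The DeprecationWarning raised in the loop above means application handlers should raise web exceptions rather than return them. A minimal before/after illustration (the handler names are made up for the example):

from aiohttp import web

async def old_style(request: web.Request) -> web.StreamResponse:
    return web.HTTPNotFound()   # still works, but triggers the warning above

async def new_style(request: web.Request) -> web.StreamResponse:
    raise web.HTTPNotFound()    # preferred: raise the exception instead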
This has to - be called within the context of any exception so the access logger - can get exception information. Returns True if the client disconnects - prematurely. - """ - if self._request_parser is not None: - self._request_parser.set_upgraded(False) - self._upgrade = False - if self._message_tail: - self._request_parser.feed_data(self._message_tail) - self._message_tail = b"" - try: - prepare_meth = resp.prepare - except AttributeError: - if resp is None: - raise RuntimeError("Missing return " "statement on request handler") - else: - raise RuntimeError( - "Web-handler should return " - "a response instance, " - "got {!r}".format(resp) - ) - try: - await prepare_meth(request) - await resp.write_eof() - except ConnectionError: - self.log_access(request, resp, start_time) - return True - else: - self.log_access(request, resp, start_time) - return False - - def handle_error( - self, - request: BaseRequest, - status: int = 500, - exc: Optional[BaseException] = None, - message: Optional[str] = None, - ) -> StreamResponse: - """Handle errors. - - Returns HTTP response with specific status code. Logs additional - information. It always closes current connection.""" - self.log_exception("Error handling request", exc_info=exc) - - ct = "text/plain" - if status == HTTPStatus.INTERNAL_SERVER_ERROR: - title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) - msg = HTTPStatus.INTERNAL_SERVER_ERROR.description - tb = None - if self.debug: - with suppress(Exception): - tb = traceback.format_exc() - - if "text/html" in request.headers.get("Accept", ""): - if tb: - tb = html_escape(tb) - msg = f"

<h2>Traceback:</h2>\n<pre>{tb}</pre>" - message = ( - "<html><head>" - "<title>{title}</title>" - "</head><body>\n<h1>{title}</h1>
" - "\n{msg}\n\n" - ).format(title=title, msg=msg) - ct = "text/html" - else: - if tb: - msg = tb - message = title + "\n\n" + msg - - resp = Response(status=status, text=message, content_type=ct) - resp.force_close() - - # some data already got sent, connection is broken - if request.writer.output_size > 0 or self.transport is None: - self.force_close() - - return resp - - async def handle_parse_error( - self, - writer: AbstractStreamWriter, - status: int, - exc: Optional[BaseException] = None, - message: Optional[str] = None, - ) -> None: - task = current_task() - assert task is not None - request = BaseRequest( - ERROR, EMPTY_PAYLOAD, self, writer, task, self._loop # type: ignore - ) - - resp = self.handle_error(request, status, exc, message) - await resp.prepare(request) - await resp.write_eof() - - if self.transport is not None: - self.transport.close() - - self._error_handler = None diff --git a/third_party/python/aiohttp/aiohttp/web_request.py b/third_party/python/aiohttp/aiohttp/web_request.py deleted file mode 100644 index f11e7be44bec..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_request.py +++ /dev/null @@ -1,824 +0,0 @@ -import asyncio -import datetime -import io -import re -import socket -import string -import tempfile -import types -import warnings -from email.utils import parsedate -from http.cookies import SimpleCookie -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - Mapping, - MutableMapping, - Optional, - Tuple, - Union, - cast, -) -from urllib.parse import parse_qsl - -import attr -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel -from .http_parser import RawRequestMessage -from .http_writer import HttpVersion -from .multipart import BodyPartReader, MultipartReader -from .streams import EmptyStreamReader, StreamReader -from .typedefs import ( - DEFAULT_JSON_DECODER, - JSONDecoder, - LooseHeaders, - RawHeaders, - StrOrURL, -) -from .web_exceptions import HTTPRequestEntityTooLarge -from .web_response import StreamResponse - -__all__ = ("BaseRequest", "FileField", "Request") - - -if TYPE_CHECKING: # pragma: no cover - from .web_app import Application - from .web_protocol import RequestHandler - from .web_urldispatcher import UrlMappingMatchInfo - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class FileField: - name: str - filename: str - file: io.BufferedReader - content_type: str - headers: "CIMultiDictProxy[str]" - - -_TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" -# '-' at the end to prevent interpretation as range in a char class - -_TOKEN = fr"[{_TCHAR}]+" - -_QDTEXT = r"[{}]".format( - r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) -) -# qdtext includes 0x5C to escape 0x5D ('\]') -# qdtext excludes obs-text (because obsoleted, and encoding not specified) - -_QUOTED_PAIR = r"\\[\t !-~]" - -_QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format( - qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR -) - -_FORWARDED_PAIR = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( - token=_TOKEN, quoted_string=_QUOTED_STRING -) - -_QUOTED_PAIR_REPLACE_RE = re.compile(r"\\([\t !-~])") -# same pattern as _QUOTED_PAIR but contains a capture group - -_FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR) - -############################################################ -# HTTP Request 
-############################################################ - - -class BaseRequest(MutableMapping[str, Any], HeadersMixin): - - POST_METHODS = { - hdrs.METH_PATCH, - hdrs.METH_POST, - hdrs.METH_PUT, - hdrs.METH_TRACE, - hdrs.METH_DELETE, - } - - ATTRS = HeadersMixin.ATTRS | frozenset( - [ - "_message", - "_protocol", - "_payload_writer", - "_payload", - "_headers", - "_method", - "_version", - "_rel_url", - "_post", - "_read_bytes", - "_state", - "_cache", - "_task", - "_client_max_size", - "_loop", - "_transport_sslcontext", - "_transport_peername", - ] - ) - - def __init__( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: "RequestHandler", - payload_writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - loop: asyncio.AbstractEventLoop, - *, - client_max_size: int = 1024 ** 2, - state: Optional[Dict[str, Any]] = None, - scheme: Optional[str] = None, - host: Optional[str] = None, - remote: Optional[str] = None, - ) -> None: - if state is None: - state = {} - self._message = message - self._protocol = protocol - self._payload_writer = payload_writer - - self._payload = payload - self._headers = message.headers - self._method = message.method - self._version = message.version - self._rel_url = message.url - self._post = ( - None - ) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]] - self._read_bytes = None # type: Optional[bytes] - - self._state = state - self._cache = {} # type: Dict[str, Any] - self._task = task - self._client_max_size = client_max_size - self._loop = loop - - transport = self._protocol.transport - assert transport is not None - self._transport_sslcontext = transport.get_extra_info("sslcontext") - self._transport_peername = transport.get_extra_info("peername") - - if scheme is not None: - self._cache["scheme"] = scheme - if host is not None: - self._cache["host"] = host - if remote is not None: - self._cache["remote"] = remote - - def clone( - self, - *, - method: str = sentinel, - rel_url: StrOrURL = sentinel, - headers: LooseHeaders = sentinel, - scheme: str = sentinel, - host: str = sentinel, - remote: str = sentinel, - ) -> "BaseRequest": - """Clone itself with replacement some attributes. - - Creates and returns a new instance of Request object. If no parameters - are given, an exact copy is returned. If a parameter is not passed, it - will reuse the one from the current request object. 
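The clone() contract described above (unspecified parameters are carried over from the original request) is what makes it useful inside middlewares. A hedged sketch of one common pattern; the middleware name and the X-Forwarded-Proto convention are assumptions for the example, not aiohttp defaults:

from aiohttp import web

@web.middleware
async def force_https_scheme(request: web.Request, handler):
    # Assumes a trusted TLS-terminating proxy that sets X-Forwarded-Proto.
    if request.headers.get("X-Forwarded-Proto") == "https":
        request = request.clone(scheme="https")
    return await handler(request)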
- - """ - - if self._read_bytes: - raise RuntimeError("Cannot clone request " "after reading its content") - - dct = {} # type: Dict[str, Any] - if method is not sentinel: - dct["method"] = method - if rel_url is not sentinel: - new_url = URL(rel_url) - dct["url"] = new_url - dct["path"] = str(new_url) - if headers is not sentinel: - # a copy semantic - dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) - dct["raw_headers"] = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() - ) - - message = self._message._replace(**dct) - - kwargs = {} - if scheme is not sentinel: - kwargs["scheme"] = scheme - if host is not sentinel: - kwargs["host"] = host - if remote is not sentinel: - kwargs["remote"] = remote - - return self.__class__( - message, - self._payload, - self._protocol, - self._payload_writer, - self._task, - self._loop, - client_max_size=self._client_max_size, - state=self._state.copy(), - **kwargs, - ) - - @property - def task(self) -> "asyncio.Task[None]": - return self._task - - @property - def protocol(self) -> "RequestHandler": - return self._protocol - - @property - def transport(self) -> Optional[asyncio.Transport]: - if self._protocol is None: - return None - return self._protocol.transport - - @property - def writer(self) -> AbstractStreamWriter: - return self._payload_writer - - @reify - def message(self) -> RawRequestMessage: - warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) - return self._message - - @reify - def rel_url(self) -> URL: - return self._rel_url - - @reify - def loop(self) -> asyncio.AbstractEventLoop: - warnings.warn( - "request.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - # MutableMapping API - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def __setitem__(self, key: str, value: Any) -> None: - self._state[key] = value - - def __delitem__(self, key: str) -> None: - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - ######## - - @reify - def secure(self) -> bool: - """A bool indicating if the request is handled with SSL.""" - return self.scheme == "https" - - @reify - def forwarded(self) -> Tuple[Mapping[str, str], ...]: - """A tuple containing all parsed Forwarded header(s). - - Makes an effort to parse Forwarded headers as specified by RFC 7239: - - - It adds one (immutable) dictionary per Forwarded 'field-value', ie - per proxy. The element corresponds to the data in the Forwarded - field-value added by the first proxy encountered by the client. Each - subsequent item corresponds to those added by later proxies. - - It checks that every value has valid syntax in general as specified - in section 4: either a 'token' or a 'quoted-string'. - - It un-escapes found escape sequences. - - It does NOT validate 'by' and 'for' contents as specified in section - 6. - - It does NOT validate 'host' contents (Host ABNF). - - It does NOT validate 'proto' contents for valid URI scheme names. 
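As an illustration of the structure the forwarded property described above produces, a header such as

    Forwarded: for=192.0.2.60;proto=http;by=203.0.113.43, for=198.51.100.17

would be exposed as one immutable mapping per proxy hop, roughly equivalent to this tuple (shown as plain dicts for readability):

expected = (
    {"for": "192.0.2.60", "proto": "http", "by": "203.0.113.43"},
    {"for": "198.51.100.17"},
)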
- - Returns a tuple containing one or more immutable dicts - """ - elems = [] - for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): - length = len(field_value) - pos = 0 - need_separator = False - elem = {} # type: Dict[str, str] - elems.append(types.MappingProxyType(elem)) - while 0 <= pos < length: - match = _FORWARDED_PAIR_RE.match(field_value, pos) - if match is not None: # got a valid forwarded-pair - if need_separator: - # bad syntax here, skip to next comma - pos = field_value.find(",", pos) - else: - name, value, port = match.groups() - if value[0] == '"': - # quoted string: remove quotes and unescape - value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) - if port: - value += port - elem[name.lower()] = value - pos += len(match.group(0)) - need_separator = True - elif field_value[pos] == ",": # next forwarded-element - need_separator = False - elem = {} - elems.append(types.MappingProxyType(elem)) - pos += 1 - elif field_value[pos] == ";": # next forwarded-pair - need_separator = False - pos += 1 - elif field_value[pos] in " \t": - # Allow whitespace even between forwarded-pairs, though - # RFC 7239 doesn't. This simplifies code and is in line - # with Postel's law. - pos += 1 - else: - # bad syntax here, skip to next comma - pos = field_value.find(",", pos) - return tuple(elems) - - @reify - def scheme(self) -> str: - """A string representing the scheme of the request. - - Hostname is resolved in this order: - - - overridden value by .clone(scheme=new_scheme) call. - - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. - - 'http' or 'https'. - """ - if self._transport_sslcontext: - return "https" - else: - return "http" - - @reify - def method(self) -> str: - """Read only property for getting HTTP method. - - The value is upper-cased str like 'GET', 'POST', 'PUT' etc. - """ - return self._method - - @reify - def version(self) -> HttpVersion: - """Read only property for getting HTTP version of request. - - Returns aiohttp.protocol.HttpVersion instance. - """ - return self._version - - @reify - def host(self) -> str: - """Hostname of the request. - - Hostname is resolved in this order: - - - overridden value by .clone(host=new_host) call. - - HOST HTTP header - - socket.getfqdn() value - """ - host = self._message.headers.get(hdrs.HOST) - if host is not None: - return host - else: - return socket.getfqdn() - - @reify - def remote(self) -> Optional[str]: - """Remote IP of client initiated HTTP request. - - The IP is resolved in this order: - - - overridden value by .clone(remote=new_remote) call. - - peername of opened socket - """ - if isinstance(self._transport_peername, (list, tuple)): - return self._transport_peername[0] - else: - return self._transport_peername - - @reify - def url(self) -> URL: - url = URL.build(scheme=self.scheme, host=self.host) - return url.join(self._rel_url) - - @reify - def path(self) -> str: - """The URL including *PATH INFO* without the host or scheme. - - E.g., ``/app/blog`` - """ - return self._rel_url.path - - @reify - def path_qs(self) -> str: - """The URL including PATH_INFO and the query string. - - E.g, /app/blog?id=10 - """ - return str(self._rel_url) - - @reify - def raw_path(self) -> str: - """The URL including raw *PATH INFO* without the host or scheme. 
- Warning, the path is unquoted and may contains non valid URL characters - - E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` - """ - return self._message.path - - @reify - def query(self) -> "MultiDictProxy[str]": - """A multidict with all the variables in the query string.""" - return self._rel_url.query - - @reify - def query_string(self) -> str: - """The query string in the URL. - - E.g., id=10 - """ - return self._rel_url.query_string - - @reify - def headers(self) -> "CIMultiDictProxy[str]": - """A case-insensitive multidict proxy with all headers.""" - return self._headers - - @reify - def raw_headers(self) -> RawHeaders: - """A sequence of pairs for all headers.""" - return self._message.raw_headers - - @staticmethod - def _http_date(_date_str: Optional[str]) -> Optional[datetime.datetime]: - """Process a date string, return a datetime object""" - if _date_str is not None: - timetuple = parsedate(_date_str) - if timetuple is not None: - return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) - return None - - @reify - def if_modified_since(self) -> Optional[datetime.datetime]: - """The value of If-Modified-Since HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) - - @reify - def if_unmodified_since(self) -> Optional[datetime.datetime]: - """The value of If-Unmodified-Since HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) - - @reify - def if_range(self) -> Optional[datetime.datetime]: - """The value of If-Range HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return self._http_date(self.headers.get(hdrs.IF_RANGE)) - - @reify - def keep_alive(self) -> bool: - """Is keepalive enabled by client?""" - return not self._message.should_close - - @reify - def cookies(self) -> Mapping[str, str]: - """Return request cookies. - - A read-only dictionary-like object. - """ - raw = self.headers.get(hdrs.COOKIE, "") - parsed = SimpleCookie(raw) # type: SimpleCookie[str] - return MappingProxyType({key: val.value for key, val in parsed.items()}) - - @reify - def http_range(self) -> slice: - """The content of Range HTTP header. - - Return a slice instance. 
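The slice convention documented above can be reproduced with a small standalone parser. This is a simplified sketch of the mapping (it skips some of the validation the real property performs), not the aiohttp property itself:

import re

def parse_range(rng: str) -> slice:
    m = re.fullmatch(r"bytes=(\d*)-(\d*)", rng)
    if not m:
        raise ValueError("range not in acceptable format")
    start = int(m.group(1)) if m.group(1) else None
    end = int(m.group(2)) if m.group(2) else None
    if start is None and end is not None:
        start, end = -end, None   # bytes=-500 -> last 500 bytes
    elif start is not None and end is not None:
        end += 1                  # header end is inclusive, slice end is exclusive
    if start is None and end is None:
        raise ValueError("No start or end of range specified")
    return slice(start, end, 1)

assert parse_range("bytes=0-499") == slice(0, 500, 1)
assert parse_range("bytes=-500") == slice(-500, None, 1)
assert parse_range("bytes=9500-") == slice(9500, None, 1)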
- - """ - rng = self._headers.get(hdrs.RANGE) - start, end = None, None - if rng is not None: - try: - pattern = r"^bytes=(\d*)-(\d*)$" - start, end = re.findall(pattern, rng)[0] - except IndexError: # pattern was not found in header - raise ValueError("range not in acceptable format") - - end = int(end) if end else None - start = int(start) if start else None - - if start is None and end is not None: - # end with no start is to return tail of content - start = -end - end = None - - if start is not None and end is not None: - # end is inclusive in range header, exclusive for slice - end += 1 - - if start >= end: - raise ValueError("start cannot be after end") - - if start is end is None: # No valid range supplied - raise ValueError("No start or end of range specified") - - return slice(start, end, 1) - - @reify - def content(self) -> StreamReader: - """Return raw payload stream.""" - return self._payload - - @property - def has_body(self) -> bool: - """Return True if request's HTTP BODY can be read, False otherwise.""" - warnings.warn( - "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 - ) - return not self._payload.at_eof() - - @property - def can_read_body(self) -> bool: - """Return True if request's HTTP BODY can be read, False otherwise.""" - return not self._payload.at_eof() - - @reify - def body_exists(self) -> bool: - """Return True if request has HTTP BODY, False otherwise.""" - return type(self._payload) is not EmptyStreamReader - - async def release(self) -> None: - """Release request. - - Eat unread part of HTTP BODY if present. - """ - while not self._payload.at_eof(): - await self._payload.readany() - - async def read(self) -> bytes: - """Read request body if present. - - Returns bytes object with full request content. 
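The request-body accessors defined in this class (read() above, plus text() and json() just below, and the can_read_body flag) are typically used together in a handler. A minimal hedged sketch; the handler name and error payload are illustrative:

from aiohttp import web

async def echo(request: web.Request) -> web.Response:
    if not request.can_read_body:
        return web.json_response({"error": "empty body"}, status=400)
    data = await request.json()   # or: await request.text() / await request.read()
    return web.json_response({"received": data})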
- """ - if self._read_bytes is None: - body = bytearray() - while True: - chunk = await self._payload.readany() - body.extend(chunk) - if self._client_max_size: - body_size = len(body) - if body_size >= self._client_max_size: - raise HTTPRequestEntityTooLarge( - max_size=self._client_max_size, actual_size=body_size - ) - if not chunk: - break - self._read_bytes = bytes(body) - return self._read_bytes - - async def text(self) -> str: - """Return BODY as text using encoding from .charset.""" - bytes_body = await self.read() - encoding = self.charset or "utf-8" - return bytes_body.decode(encoding) - - async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: - """Return BODY as JSON.""" - body = await self.text() - return loads(body) - - async def multipart(self) -> MultipartReader: - """Return async iterator to process BODY as multipart.""" - return MultipartReader(self._headers, self._payload) - - async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": - """Return POST parameters.""" - if self._post is not None: - return self._post - if self._method not in self.POST_METHODS: - self._post = MultiDictProxy(MultiDict()) - return self._post - - content_type = self.content_type - if content_type not in ( - "", - "application/x-www-form-urlencoded", - "multipart/form-data", - ): - self._post = MultiDictProxy(MultiDict()) - return self._post - - out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]] - - if content_type == "multipart/form-data": - multipart = await self.multipart() - max_size = self._client_max_size - - field = await multipart.next() - while field is not None: - size = 0 - field_ct = field.headers.get(hdrs.CONTENT_TYPE) - - if isinstance(field, BodyPartReader): - assert field.name is not None - - # Note that according to RFC 7578, the Content-Type header - # is optional, even for files, so we can't assume it's - # present. 
- # https://tools.ietf.org/html/rfc7578#section-4.4 - if field.filename: - # store file in temp file - tmp = tempfile.TemporaryFile() - chunk = await field.read_chunk(size=2 ** 16) - while chunk: - chunk = field.decode(chunk) - tmp.write(chunk) - size += len(chunk) - if 0 < max_size < size: - raise HTTPRequestEntityTooLarge( - max_size=max_size, actual_size=size - ) - chunk = await field.read_chunk(size=2 ** 16) - tmp.seek(0) - - if field_ct is None: - field_ct = "application/octet-stream" - - ff = FileField( - field.name, - field.filename, - cast(io.BufferedReader, tmp), - field_ct, - field.headers, - ) - out.add(field.name, ff) - else: - # deal with ordinary data - value = await field.read(decode=True) - if field_ct is None or field_ct.startswith("text/"): - charset = field.get_charset(default="utf-8") - out.add(field.name, value.decode(charset)) - else: - out.add(field.name, value) - size += len(value) - if 0 < max_size < size: - raise HTTPRequestEntityTooLarge( - max_size=max_size, actual_size=size - ) - else: - raise ValueError( - "To decode nested multipart you need " "to use custom reader", - ) - - field = await multipart.next() - else: - data = await self.read() - if data: - charset = self.charset or "utf-8" - out.extend( - parse_qsl( - data.rstrip().decode(charset), - keep_blank_values=True, - encoding=charset, - ) - ) - - self._post = MultiDictProxy(out) - return self._post - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """Extra info from protocol transport""" - protocol = self._protocol - if protocol is None: - return default - - transport = protocol.transport - if transport is None: - return default - - return transport.get_extra_info(name, default) - - def __repr__(self) -> str: - ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( - "ascii" - ) - return "<{} {} {} >".format( - self.__class__.__name__, self._method, ascii_encodable_path - ) - - def __eq__(self, other: object) -> bool: - return id(self) == id(other) - - def __bool__(self) -> bool: - return True - - async def _prepare_hook(self, response: StreamResponse) -> None: - return - - def _cancel(self, exc: BaseException) -> None: - self._payload.set_exception(exc) - - -class Request(BaseRequest): - - ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - # matchdict, route_name, handler - # or information about traversal lookup - - # initialized after route resolving - self._match_info = None # type: Optional[UrlMappingMatchInfo] - - if DEBUG: - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom {}.{} attribute " - "is discouraged".format(self.__class__.__name__, name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - def clone( - self, - *, - method: str = sentinel, - rel_url: StrOrURL = sentinel, - headers: LooseHeaders = sentinel, - scheme: str = sentinel, - host: str = sentinel, - remote: str = sentinel, - ) -> "Request": - ret = super().clone( - method=method, - rel_url=rel_url, - headers=headers, - scheme=scheme, - host=host, - remote=remote, - ) - new_ret = cast(Request, ret) - new_ret._match_info = self._match_info - return new_ret - - @reify - def match_info(self) -> "UrlMappingMatchInfo": - """Result of route resolving.""" - match_info = self._match_info - assert match_info is not None - return match_info - - @property - def app(self) -> "Application": - """Application 
instance.""" - match_info = self._match_info - assert match_info is not None - return match_info.current_app - - @property - def config_dict(self) -> ChainMapProxy: - match_info = self._match_info - assert match_info is not None - lst = match_info.apps - app = self.app - idx = lst.index(app) - sublist = list(reversed(lst[: idx + 1])) - return ChainMapProxy(sublist) - - async def _prepare_hook(self, response: StreamResponse) -> None: - match_info = self._match_info - if match_info is None: - return - for app in match_info._apps: - await app.on_response_prepare.send(self, response) diff --git a/third_party/python/aiohttp/aiohttp/web_response.py b/third_party/python/aiohttp/aiohttp/web_response.py deleted file mode 100644 index f34b00e2d952..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_response.py +++ /dev/null @@ -1,781 +0,0 @@ -import asyncio -import collections.abc -import datetime -import enum -import json -import math -import time -import warnings -import zlib -from concurrent.futures import Executor -from email.utils import parsedate -from http.cookies import Morsel, SimpleCookie -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - Mapping, - MutableMapping, - Optional, - Tuple, - Union, - cast, -) - -from multidict import CIMultiDict, istr - -from . import hdrs, payload -from .abc import AbstractStreamWriter -from .helpers import PY_38, HeadersMixin, rfc822_formatted_time, sentinel -from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11 -from .payload import Payload -from .typedefs import JSONEncoder, LooseHeaders - -__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") - - -if TYPE_CHECKING: # pragma: no cover - from .web_request import BaseRequest - - BaseClass = MutableMapping[str, Any] -else: - BaseClass = collections.abc.MutableMapping - - -if not PY_38: - # allow samesite to be used in python < 3.8 - # already permitted in python 3.8, see https://bugs.python.org/issue29613 - Morsel._reserved["samesite"] = "SameSite" # type: ignore - - -class ContentCoding(enum.Enum): - # The content codings that we have support for. 
- # - # Additional registered codings are listed at: - # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding - deflate = "deflate" - gzip = "gzip" - identity = "identity" - - -############################################################ -# HTTP Response classes -############################################################ - - -class StreamResponse(BaseClass, HeadersMixin): - - _length_check = True - - def __init__( - self, - *, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - ) -> None: - self._body = None - self._keep_alive = None # type: Optional[bool] - self._chunked = False - self._compression = False - self._compression_force = None # type: Optional[ContentCoding] - self._cookies = SimpleCookie() # type: SimpleCookie[str] - - self._req = None # type: Optional[BaseRequest] - self._payload_writer = None # type: Optional[AbstractStreamWriter] - self._eof_sent = False - self._body_length = 0 - self._state = {} # type: Dict[str, Any] - - if headers is not None: - self._headers = CIMultiDict(headers) # type: CIMultiDict[str] - else: - self._headers = CIMultiDict() - - self.set_status(status, reason) - - @property - def prepared(self) -> bool: - return self._payload_writer is not None - - @property - def task(self) -> "asyncio.Task[None]": - return getattr(self._req, "task", None) - - @property - def status(self) -> int: - return self._status - - @property - def chunked(self) -> bool: - return self._chunked - - @property - def compression(self) -> bool: - return self._compression - - @property - def reason(self) -> str: - return self._reason - - def set_status( - self, - status: int, - reason: Optional[str] = None, - _RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES, - ) -> None: - assert not self.prepared, ( - "Cannot change the response status code after " "the headers have been sent" - ) - self._status = int(status) - if reason is None: - try: - reason = _RESPONSES[self._status][0] - except Exception: - reason = "" - self._reason = reason - - @property - def keep_alive(self) -> Optional[bool]: - return self._keep_alive - - def force_close(self) -> None: - self._keep_alive = False - - @property - def body_length(self) -> int: - return self._body_length - - @property - def output_length(self) -> int: - warnings.warn("output_length is deprecated", DeprecationWarning) - assert self._payload_writer - return self._payload_writer.buffer_size - - def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: - """Enables automatic chunked transfer encoding.""" - self._chunked = True - - if hdrs.CONTENT_LENGTH in self._headers: - raise RuntimeError( - "You can't enable chunked encoding when " "a content length is set" - ) - if chunk_size is not None: - warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) - - def enable_compression( - self, force: Optional[Union[bool, ContentCoding]] = None - ) -> None: - """Enables response compression encoding.""" - # Backwards compatibility for when force was a bool <0.17. 
- if type(force) == bool: - force = ContentCoding.deflate if force else ContentCoding.identity - warnings.warn( - "Using boolean for force is deprecated #3318", DeprecationWarning - ) - elif force is not None: - assert isinstance(force, ContentCoding), ( - "force should one of " "None, bool or " "ContentEncoding" - ) - - self._compression = True - self._compression_force = force - - @property - def headers(self) -> "CIMultiDict[str]": - return self._headers - - @property - def cookies(self) -> "SimpleCookie[str]": - return self._cookies - - def set_cookie( - self, - name: str, - value: str, - *, - expires: Optional[str] = None, - domain: Optional[str] = None, - max_age: Optional[Union[int, str]] = None, - path: str = "/", - secure: Optional[bool] = None, - httponly: Optional[bool] = None, - version: Optional[str] = None, - samesite: Optional[str] = None, - ) -> None: - """Set or update response cookie. - - Sets new cookie or updates existent with new value. - Also updates only those params which are not None. - """ - - old = self._cookies.get(name) - if old is not None and old.coded_value == "": - # deleted cookie - self._cookies.pop(name, None) - - self._cookies[name] = value - c = self._cookies[name] - - if expires is not None: - c["expires"] = expires - elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": - del c["expires"] - - if domain is not None: - c["domain"] = domain - - if max_age is not None: - c["max-age"] = str(max_age) - elif "max-age" in c: - del c["max-age"] - - c["path"] = path - - if secure is not None: - c["secure"] = secure - if httponly is not None: - c["httponly"] = httponly - if version is not None: - c["version"] = version - if samesite is not None: - c["samesite"] = samesite - - def del_cookie( - self, name: str, *, domain: Optional[str] = None, path: str = "/" - ) -> None: - """Delete cookie. - - Creates new empty expired cookie. - """ - # TODO: do we need domain/path here? 
- self._cookies.pop(name, None) - self.set_cookie( - name, - "", - max_age=0, - expires="Thu, 01 Jan 1970 00:00:00 GMT", - domain=domain, - path=path, - ) - - @property - def content_length(self) -> Optional[int]: - # Just a placeholder for adding setter - return super().content_length - - @content_length.setter - def content_length(self, value: Optional[int]) -> None: - if value is not None: - value = int(value) - if self._chunked: - raise RuntimeError( - "You can't set content length when " "chunked encoding is enable" - ) - self._headers[hdrs.CONTENT_LENGTH] = str(value) - else: - self._headers.pop(hdrs.CONTENT_LENGTH, None) - - @property - def content_type(self) -> str: - # Just a placeholder for adding setter - return super().content_type - - @content_type.setter - def content_type(self, value: str) -> None: - self.content_type # read header values if needed - self._content_type = str(value) - self._generate_content_type_header() - - @property - def charset(self) -> Optional[str]: - # Just a placeholder for adding setter - return super().charset - - @charset.setter - def charset(self, value: Optional[str]) -> None: - ctype = self.content_type # read header values if needed - if ctype == "application/octet-stream": - raise RuntimeError( - "Setting charset for application/octet-stream " - "doesn't make sense, setup content_type first" - ) - assert self._content_dict is not None - if value is None: - self._content_dict.pop("charset", None) - else: - self._content_dict["charset"] = str(value).lower() - self._generate_content_type_header() - - @property - def last_modified(self) -> Optional[datetime.datetime]: - """The value of Last-Modified HTTP header, or None. - - This header is represented as a `datetime` object. - """ - httpdate = self._headers.get(hdrs.LAST_MODIFIED) - if httpdate is not None: - timetuple = parsedate(httpdate) - if timetuple is not None: - return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) - return None - - @last_modified.setter - def last_modified( - self, value: Optional[Union[int, float, datetime.datetime, str]] - ) -> None: - if value is None: - self._headers.pop(hdrs.LAST_MODIFIED, None) - elif isinstance(value, (int, float)): - self._headers[hdrs.LAST_MODIFIED] = time.strftime( - "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) - ) - elif isinstance(value, datetime.datetime): - self._headers[hdrs.LAST_MODIFIED] = time.strftime( - "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() - ) - elif isinstance(value, str): - self._headers[hdrs.LAST_MODIFIED] = value - - def _generate_content_type_header( - self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE - ) -> None: - assert self._content_dict is not None - assert self._content_type is not None - params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) - if params: - ctype = self._content_type + "; " + params - else: - ctype = self._content_type - self._headers[CONTENT_TYPE] = ctype - - async def _do_start_compression(self, coding: ContentCoding) -> None: - if coding != ContentCoding.identity: - assert self._payload_writer is not None - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._payload_writer.enable_compression(coding.value) - # Compressed payload may have different content length, - # remove the header - self._headers.popall(hdrs.CONTENT_LENGTH, None) - - async def _start_compression(self, request: "BaseRequest") -> None: - if self._compression_force: - await self._do_start_compression(self._compression_force) - else: - accept_encoding = 
request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for coding in ContentCoding: - if coding.value in accept_encoding: - await self._do_start_compression(coding) - return - - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - if self._eof_sent: - return None - if self._payload_writer is not None: - return self._payload_writer - - return await self._start(request) - - async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - self._req = request - writer = self._payload_writer = request._payload_writer - - await self._prepare_headers() - await request._prepare_hook(self) - await self._write_headers() - - return writer - - async def _prepare_headers(self) -> None: - request = self._req - assert request is not None - writer = self._payload_writer - assert writer is not None - keep_alive = self._keep_alive - if keep_alive is None: - keep_alive = request.keep_alive - self._keep_alive = keep_alive - - version = request.version - - headers = self._headers - for cookie in self._cookies.values(): - value = cookie.output(header="")[1:] - headers.add(hdrs.SET_COOKIE, value) - - if self._compression: - await self._start_compression(request) - - if self._chunked: - if version != HttpVersion11: - raise RuntimeError( - "Using chunked encoding is forbidden " - "for HTTP/{0.major}.{0.minor}".format(request.version) - ) - writer.enable_chunking() - headers[hdrs.TRANSFER_ENCODING] = "chunked" - if hdrs.CONTENT_LENGTH in headers: - del headers[hdrs.CONTENT_LENGTH] - elif self._length_check: - writer.length = self.content_length - if writer.length is None: - if version >= HttpVersion11: - writer.enable_chunking() - headers[hdrs.TRANSFER_ENCODING] = "chunked" - if hdrs.CONTENT_LENGTH in headers: - del headers[hdrs.CONTENT_LENGTH] - else: - keep_alive = False - # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 - # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 - elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204): - del headers[hdrs.CONTENT_LENGTH] - - headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") - headers.setdefault(hdrs.DATE, rfc822_formatted_time()) - headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) - - # connection header - if hdrs.CONNECTION not in headers: - if keep_alive: - if version == HttpVersion10: - headers[hdrs.CONNECTION] = "keep-alive" - else: - if version == HttpVersion11: - headers[hdrs.CONNECTION] = "close" - - async def _write_headers(self) -> None: - request = self._req - assert request is not None - writer = self._payload_writer - assert writer is not None - # status line - version = request.version - status_line = "HTTP/{}.{} {} {}".format( - version[0], version[1], self._status, self._reason - ) - await writer.write_headers(status_line, self._headers) - - async def write(self, data: bytes) -> None: - assert isinstance( - data, (bytes, bytearray, memoryview) - ), "data argument must be byte-ish (%r)" % type(data) - - if self._eof_sent: - raise RuntimeError("Cannot call write() after write_eof()") - if self._payload_writer is None: - raise RuntimeError("Cannot call write() before prepare()") - - await self._payload_writer.write(data) - - async def drain(self) -> None: - assert not self._eof_sent, "EOF has already been sent" - assert self._payload_writer is not None, "Response has not been started" - warnings.warn( - "drain method is deprecated, use await resp.write()", - DeprecationWarning, - stacklevel=2, - ) - await self._payload_writer.drain() - - async def 
write_eof(self, data: bytes = b"") -> None: - assert isinstance( - data, (bytes, bytearray, memoryview) - ), "data argument must be byte-ish (%r)" % type(data) - - if self._eof_sent: - return - - assert self._payload_writer is not None, "Response has not been started" - - await self._payload_writer.write_eof(data) - self._eof_sent = True - self._req = None - self._body_length = self._payload_writer.output_size - self._payload_writer = None - - def __repr__(self) -> str: - if self._eof_sent: - info = "eof" - elif self.prepared: - assert self._req is not None - info = f"{self._req.method} {self._req.path} " - else: - info = "not prepared" - return f"<{self.__class__.__name__} {self.reason} {info}>" - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def __setitem__(self, key: str, value: Any) -> None: - self._state[key] = value - - def __delitem__(self, key: str) -> None: - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - def __hash__(self) -> int: - return hash(id(self)) - - def __eq__(self, other: object) -> bool: - return self is other - - -class Response(StreamResponse): - def __init__( - self, - *, - body: Any = None, - status: int = 200, - reason: Optional[str] = None, - text: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: Optional[str] = None, - charset: Optional[str] = None, - zlib_executor_size: Optional[int] = None, - zlib_executor: Optional[Executor] = None, - ) -> None: - if body is not None and text is not None: - raise ValueError("body and text are not allowed together") - - if headers is None: - real_headers = CIMultiDict() # type: CIMultiDict[str] - elif not isinstance(headers, CIMultiDict): - real_headers = CIMultiDict(headers) - else: - real_headers = headers # = cast('CIMultiDict[str]', headers) - - if content_type is not None and "charset" in content_type: - raise ValueError("charset must not be in content_type " "argument") - - if text is not None: - if hdrs.CONTENT_TYPE in real_headers: - if content_type or charset: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - else: - # fast path for filling headers - if not isinstance(text, str): - raise TypeError("text argument must be str (%r)" % type(text)) - if content_type is None: - content_type = "text/plain" - if charset is None: - charset = "utf-8" - real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset - body = text.encode(charset) - text = None - else: - if hdrs.CONTENT_TYPE in real_headers: - if content_type is not None or charset is not None: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - else: - if content_type is not None: - if charset is not None: - content_type += "; charset=" + charset - real_headers[hdrs.CONTENT_TYPE] = content_type - - super().__init__(status=status, reason=reason, headers=real_headers) - - if text is not None: - self.text = text - else: - self.body = body - - self._compressed_body = None # type: Optional[bytes] - self._zlib_executor_size = zlib_executor_size - self._zlib_executor = zlib_executor - - @property - def body(self) -> Optional[Union[bytes, Payload]]: - return self._body - - @body.setter - def body( - self, - body: bytes, - CONTENT_TYPE: istr = hdrs.CONTENT_TYPE, - CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, - ) -> None: - if body is None: - self._body = None # type: 
Optional[bytes] - self._body_payload = False # type: bool - elif isinstance(body, (bytes, bytearray)): - self._body = body - self._body_payload = False - else: - try: - self._body = body = payload.PAYLOAD_REGISTRY.get(body) - except payload.LookupError: - raise ValueError("Unsupported body type %r" % type(body)) - - self._body_payload = True - - headers = self._headers - - # set content-length header if needed - if not self._chunked and CONTENT_LENGTH not in headers: - size = body.size - if size is not None: - headers[CONTENT_LENGTH] = str(size) - - # set content-type - if CONTENT_TYPE not in headers: - headers[CONTENT_TYPE] = body.content_type - - # copy payload headers - if body.headers: - for (key, value) in body.headers.items(): - if key not in headers: - headers[key] = value - - self._compressed_body = None - - @property - def text(self) -> Optional[str]: - if self._body is None: - return None - return self._body.decode(self.charset or "utf-8") - - @text.setter - def text(self, text: str) -> None: - assert text is None or isinstance( - text, str - ), "text argument must be str (%r)" % type(text) - - if self.content_type == "application/octet-stream": - self.content_type = "text/plain" - if self.charset is None: - self.charset = "utf-8" - - self._body = text.encode(self.charset) - self._body_payload = False - self._compressed_body = None - - @property - def content_length(self) -> Optional[int]: - if self._chunked: - return None - - if hdrs.CONTENT_LENGTH in self._headers: - return super().content_length - - if self._compressed_body is not None: - # Return length of the compressed body - return len(self._compressed_body) - elif self._body_payload: - # A payload without content length, or a compressed payload - return None - elif self._body is not None: - return len(self._body) - else: - return 0 - - @content_length.setter - def content_length(self, value: Optional[int]) -> None: - raise RuntimeError("Content length is set automatically") - - async def write_eof(self, data: bytes = b"") -> None: - if self._eof_sent: - return - if self._compressed_body is None: - body = self._body # type: Optional[Union[bytes, Payload]] - else: - body = self._compressed_body - assert not data, f"data arg is not supported, got {data!r}" - assert self._req is not None - assert self._payload_writer is not None - if body is not None: - if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]: - await super().write_eof() - elif self._body_payload: - payload = cast(Payload, body) - await payload.write(self._payload_writer) - await super().write_eof() - else: - await super().write_eof(cast(bytes, body)) - else: - await super().write_eof() - - async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: - if not self._body_payload: - if self._body is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body)) - else: - self._headers[hdrs.CONTENT_LENGTH] = "0" - - return await super()._start(request) - - def _compress_body(self, zlib_mode: int) -> None: - assert zlib_mode > 0 - compressobj = zlib.compressobj(wbits=zlib_mode) - body_in = self._body - assert body_in is not None - self._compressed_body = compressobj.compress(body_in) + compressobj.flush() - - async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._body_payload or self._chunked: - return await super()._do_start_compression(coding) - - if coding != ContentCoding.identity: - # Instead of using 
_payload_writer.enable_compression, - # compress the whole body - zlib_mode = ( - 16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS - ) - body_in = self._body - assert body_in is not None - if ( - self._zlib_executor_size is not None - and len(body_in) > self._zlib_executor_size - ): - await asyncio.get_event_loop().run_in_executor( - self._zlib_executor, self._compress_body, zlib_mode - ) - else: - self._compress_body(zlib_mode) - - body_out = self._compressed_body - assert body_out is not None - - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out)) - - -def json_response( - data: Any = sentinel, - *, - text: Optional[str] = None, - body: Optional[bytes] = None, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: str = "application/json", - dumps: JSONEncoder = json.dumps, -) -> Response: - if data is not sentinel: - if text or body: - raise ValueError("only one of data, text, or body should be specified") - else: - text = dumps(data) - return Response( - text=text, - body=body, - status=status, - reason=reason, - headers=headers, - content_type=content_type, - ) diff --git a/third_party/python/aiohttp/aiohttp/web_routedef.py b/third_party/python/aiohttp/aiohttp/web_routedef.py deleted file mode 100644 index 188525103de5..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_routedef.py +++ /dev/null @@ -1,215 +0,0 @@ -import abc -import os # noqa -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Dict, - Iterator, - List, - Optional, - Sequence, - Type, - Union, - overload, -) - -import attr - -from . import hdrs -from .abc import AbstractView -from .typedefs import PathLike - -if TYPE_CHECKING: # pragma: no cover - from .web_request import Request - from .web_response import StreamResponse - from .web_urldispatcher import AbstractRoute, UrlDispatcher -else: - Request = StreamResponse = UrlDispatcher = AbstractRoute = None - - -__all__ = ( - "AbstractRouteDef", - "RouteDef", - "StaticDef", - "RouteTableDef", - "head", - "options", - "get", - "post", - "patch", - "put", - "delete", - "route", - "view", - "static", -) - - -class AbstractRouteDef(abc.ABC): - @abc.abstractmethod - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - pass # pragma: no cover - - -_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]] -_HandlerType = Union[Type[AbstractView], _SimpleHandler] - - -@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) -class RouteDef(AbstractRouteDef): - method: str - path: str - handler: _HandlerType - kwargs: Dict[str, Any] - - def __repr__(self) -> str: - info = [] - for name, value in sorted(self.kwargs.items()): - info.append(f", {name}={value!r}") - return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format( - method=self.method, path=self.path, handler=self.handler, info="".join(info) - ) - - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - if self.method in hdrs.METH_ALL: - reg = getattr(router, "add_" + self.method.lower()) - return [reg(self.path, self.handler, **self.kwargs)] - else: - return [ - router.add_route(self.method, self.path, self.handler, **self.kwargs) - ] - - -@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) -class StaticDef(AbstractRouteDef): - prefix: str - path: PathLike - kwargs: Dict[str, Any] - - def __repr__(self) -> str: - info = [] - for name, value in sorted(self.kwargs.items()): - info.append(f", {name}={value!r}") - return "<StaticDef {prefix} -> {path}" "{info}>".format(
- prefix=self.prefix, path=self.path, info="".join(info) - ) - - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - resource = router.add_static(self.prefix, self.path, **self.kwargs) - routes = resource.get_info().get("routes", {}) - return list(routes.values()) - - -def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return RouteDef(method, path, handler, kwargs) - - -def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_HEAD, path, handler, **kwargs) - - -def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_OPTIONS, path, handler, **kwargs) - - -def get( - path: str, - handler: _HandlerType, - *, - name: Optional[str] = None, - allow_head: bool = True, - **kwargs: Any, -) -> RouteDef: - return route( - hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs - ) - - -def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_POST, path, handler, **kwargs) - - -def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_PUT, path, handler, **kwargs) - - -def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_PATCH, path, handler, **kwargs) - - -def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_DELETE, path, handler, **kwargs) - - -def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef: - return route(hdrs.METH_ANY, path, handler, **kwargs) - - -def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef: - return StaticDef(prefix, path, kwargs) - - -_Deco = Callable[[_HandlerType], _HandlerType] - - -class RouteTableDef(Sequence[AbstractRouteDef]): - """Route definition table""" - - def __init__(self) -> None: - self._items = [] # type: List[AbstractRouteDef] - - def __repr__(self) -> str: - return "<RouteTableDef count={}>".format(len(self._items)) - - @overload - def __getitem__(self, index: int) -> AbstractRouteDef: - ... - - @overload - def __getitem__(self, index: slice) -> List[AbstractRouteDef]: - ... 
- - def __getitem__(self, index): # type: ignore - return self._items[index] - - def __iter__(self) -> Iterator[AbstractRouteDef]: - return iter(self._items) - - def __len__(self) -> int: - return len(self._items) - - def __contains__(self, item: object) -> bool: - return item in self._items - - def route(self, method: str, path: str, **kwargs: Any) -> _Deco: - def inner(handler: _HandlerType) -> _HandlerType: - self._items.append(RouteDef(method, path, handler, kwargs)) - return handler - - return inner - - def head(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_HEAD, path, **kwargs) - - def get(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_GET, path, **kwargs) - - def post(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_POST, path, **kwargs) - - def put(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_PUT, path, **kwargs) - - def patch(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_PATCH, path, **kwargs) - - def delete(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_DELETE, path, **kwargs) - - def view(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_ANY, path, **kwargs) - - def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None: - self._items.append(StaticDef(prefix, path, kwargs)) diff --git a/third_party/python/aiohttp/aiohttp/web_runner.py b/third_party/python/aiohttp/aiohttp/web_runner.py deleted file mode 100644 index 25ac28a7a892..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_runner.py +++ /dev/null @@ -1,381 +0,0 @@ -import asyncio -import signal -import socket -from abc import ABC, abstractmethod -from typing import Any, List, Optional, Set - -from yarl import URL - -from .web_app import Application -from .web_server import Server - -try: - from ssl import SSLContext -except ImportError: - SSLContext = object # type: ignore - - -__all__ = ( - "BaseSite", - "TCPSite", - "UnixSite", - "NamedPipeSite", - "SockSite", - "BaseRunner", - "AppRunner", - "ServerRunner", - "GracefulExit", -) - - -class GracefulExit(SystemExit): - code = 1 - - -def _raise_graceful_exit() -> None: - raise GracefulExit() - - -class BaseSite(ABC): - __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server") - - def __init__( - self, - runner: "BaseRunner", - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - if runner.server is None: - raise RuntimeError("Call runner.setup() before making a site") - self._runner = runner - self._shutdown_timeout = shutdown_timeout - self._ssl_context = ssl_context - self._backlog = backlog - self._server = None # type: Optional[asyncio.AbstractServer] - - @property - @abstractmethod - def name(self) -> str: - pass # pragma: no cover - - @abstractmethod - async def start(self) -> None: - self._runner._reg_site(self) - - async def stop(self) -> None: - self._runner._check_site(self) - if self._server is None: - self._runner._unreg_site(self) - return # not started yet - self._server.close() - # named pipes do not have wait_closed property - if hasattr(self._server, "wait_closed"): - await self._server.wait_closed() - await self._runner.shutdown() - assert self._runner.server - await self._runner.server.shutdown(self._shutdown_timeout) - self._runner._unreg_site(self) - - -class TCPSite(BaseSite): - __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port") - - def __init__( - self, - 
runner: "BaseRunner", - host: Optional[str] = None, - port: Optional[int] = None, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._host = host - if port is None: - port = 8443 if self._ssl_context else 8080 - self._port = port - self._reuse_address = reuse_address - self._reuse_port = reuse_port - - @property - def name(self) -> str: - scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if self._host is None else self._host - return str(URL.build(scheme=scheme, host=host, port=self._port)) - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_server( - server, - self._host, - self._port, - ssl=self._ssl_context, - backlog=self._backlog, - reuse_address=self._reuse_address, - reuse_port=self._reuse_port, - ) - - -class UnixSite(BaseSite): - __slots__ = ("_path",) - - def __init__( - self, - runner: "BaseRunner", - path: str, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._path = path - - @property - def name(self) -> str: - scheme = "https" if self._ssl_context else "http" - return f"{scheme}://unix:{self._path}:" - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_unix_server( - server, self._path, ssl=self._ssl_context, backlog=self._backlog - ) - - -class NamedPipeSite(BaseSite): - __slots__ = ("_path",) - - def __init__( - self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 - ) -> None: - loop = asyncio.get_event_loop() - if not isinstance(loop, asyncio.ProactorEventLoop): # type: ignore - raise RuntimeError( - "Named Pipes only available in proactor" "loop under windows" - ) - super().__init__(runner, shutdown_timeout=shutdown_timeout) - self._path = path - - @property - def name(self) -> str: - return self._path - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - _server = await loop.start_serving_pipe(server, self._path) # type: ignore - self._server = _server[0] - - -class SockSite(BaseSite): - __slots__ = ("_sock", "_name") - - def __init__( - self, - runner: "BaseRunner", - sock: socket.socket, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._sock = sock - scheme = "https" if self._ssl_context else "http" - if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: - name = f"{scheme}://unix:{sock.getsockname()}:" - else: - host, port = sock.getsockname()[:2] - name = str(URL.build(scheme=scheme, host=host, port=port)) - self._name = name - - @property - def name(self) -> str: - return self._name - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert 
server is not None - self._server = await loop.create_server( - server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog - ) - - -class BaseRunner(ABC): - __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites") - - def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None: - self._handle_signals = handle_signals - self._kwargs = kwargs - self._server = None # type: Optional[Server] - self._sites = [] # type: List[BaseSite] - - @property - def server(self) -> Optional[Server]: - return self._server - - @property - def addresses(self) -> List[Any]: - ret = [] # type: List[Any] - for site in self._sites: - server = site._server - if server is not None: - sockets = server.sockets - if sockets is not None: - for sock in sockets: - ret.append(sock.getsockname()) - return ret - - @property - def sites(self) -> Set[BaseSite]: - return set(self._sites) - - async def setup(self) -> None: - loop = asyncio.get_event_loop() - - if self._handle_signals: - try: - loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) - loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) - except NotImplementedError: # pragma: no cover - # add_signal_handler is not implemented on Windows - pass - - self._server = await self._make_server() - - @abstractmethod - async def shutdown(self) -> None: - pass # pragma: no cover - - async def cleanup(self) -> None: - loop = asyncio.get_event_loop() - - if self._server is None: - # no started yet, do nothing - return - - # The loop over sites is intentional, an exception on gather() - # leaves self._sites in unpredictable state. - # The loop guaranties that a site is either deleted on success or - # still present on failure - for site in list(self._sites): - await site.stop() - await self._cleanup_server() - self._server = None - if self._handle_signals: - try: - loop.remove_signal_handler(signal.SIGINT) - loop.remove_signal_handler(signal.SIGTERM) - except NotImplementedError: # pragma: no cover - # remove_signal_handler is not implemented on Windows - pass - - @abstractmethod - async def _make_server(self) -> Server: - pass # pragma: no cover - - @abstractmethod - async def _cleanup_server(self) -> None: - pass # pragma: no cover - - def _reg_site(self, site: BaseSite) -> None: - if site in self._sites: - raise RuntimeError(f"Site {site} is already registered in runner {self}") - self._sites.append(site) - - def _check_site(self, site: BaseSite) -> None: - if site not in self._sites: - raise RuntimeError(f"Site {site} is not registered in runner {self}") - - def _unreg_site(self, site: BaseSite) -> None: - if site not in self._sites: - raise RuntimeError(f"Site {site} is not registered in runner {self}") - self._sites.remove(site) - - -class ServerRunner(BaseRunner): - """Low-level web server runner""" - - __slots__ = ("_web_server",) - - def __init__( - self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any - ) -> None: - super().__init__(handle_signals=handle_signals, **kwargs) - self._web_server = web_server - - async def shutdown(self) -> None: - pass - - async def _make_server(self) -> Server: - return self._web_server - - async def _cleanup_server(self) -> None: - pass - - -class AppRunner(BaseRunner): - """Web Application runner""" - - __slots__ = ("_app",) - - def __init__( - self, app: Application, *, handle_signals: bool = False, **kwargs: Any - ) -> None: - super().__init__(handle_signals=handle_signals, **kwargs) - if not isinstance(app, Application): - raise TypeError( - "The first argument should 
be web.Application " - "instance, got {!r}".format(app) - ) - self._app = app - - @property - def app(self) -> Application: - return self._app - - async def shutdown(self) -> None: - await self._app.shutdown() - - async def _make_server(self) -> Server: - loop = asyncio.get_event_loop() - self._app._set_loop(loop) - self._app.on_startup.freeze() - await self._app.startup() - self._app.freeze() - - return self._app._make_handler(loop=loop, **self._kwargs) - - async def _cleanup_server(self) -> None: - await self._app.cleanup() diff --git a/third_party/python/aiohttp/aiohttp/web_server.py b/third_party/python/aiohttp/aiohttp/web_server.py deleted file mode 100644 index 5657ed9c8008..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_server.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Low level HTTP server.""" -import asyncio -from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa - -from .abc import AbstractStreamWriter -from .helpers import get_running_loop -from .http_parser import RawRequestMessage -from .streams import StreamReader -from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler -from .web_request import BaseRequest - -__all__ = ("Server",) - - -class Server: - def __init__( - self, - handler: _RequestHandler, - *, - request_factory: Optional[_RequestFactory] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any - ) -> None: - self._loop = get_running_loop(loop) - self._connections = {} # type: Dict[RequestHandler, asyncio.Transport] - self._kwargs = kwargs - self.requests_count = 0 - self.request_handler = handler - self.request_factory = request_factory or self._make_request - - @property - def connections(self) -> List[RequestHandler]: - return list(self._connections.keys()) - - def connection_made( - self, handler: RequestHandler, transport: asyncio.Transport - ) -> None: - self._connections[handler] = transport - - def connection_lost( - self, handler: RequestHandler, exc: Optional[BaseException] = None - ) -> None: - if handler in self._connections: - del self._connections[handler] - - def _make_request( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: RequestHandler, - writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - ) -> BaseRequest: - return BaseRequest(message, payload, protocol, writer, task, self._loop) - - async def shutdown(self, timeout: Optional[float] = None) -> None: - coros = [conn.shutdown(timeout) for conn in self._connections] - await asyncio.gather(*coros) - self._connections.clear() - - def __call__(self) -> RequestHandler: - return RequestHandler(self, loop=self._loop, **self._kwargs) diff --git a/third_party/python/aiohttp/aiohttp/web_urldispatcher.py b/third_party/python/aiohttp/aiohttp/web_urldispatcher.py deleted file mode 100644 index 2afd72f13dba..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_urldispatcher.py +++ /dev/null @@ -1,1233 +0,0 @@ -import abc -import asyncio -import base64 -import hashlib -import inspect -import keyword -import os -import re -import warnings -from contextlib import contextmanager -from functools import wraps -from pathlib import Path -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Container, - Dict, - Generator, - Iterable, - Iterator, - List, - Mapping, - Optional, - Pattern, - Set, - Sized, - Tuple, - Type, - Union, - cast, -) - -from typing_extensions import TypedDict -from yarl import URL, __version__ as yarl_version # type: ignore - -from . 
import hdrs -from .abc import AbstractMatchInfo, AbstractRouter, AbstractView -from .helpers import DEBUG -from .http import HttpVersion11 -from .typedefs import PathLike -from .web_exceptions import ( - HTTPException, - HTTPExpectationFailed, - HTTPForbidden, - HTTPMethodNotAllowed, - HTTPNotFound, -) -from .web_fileresponse import FileResponse -from .web_request import Request -from .web_response import Response, StreamResponse -from .web_routedef import AbstractRouteDef - -__all__ = ( - "UrlDispatcher", - "UrlMappingMatchInfo", - "AbstractResource", - "Resource", - "PlainResource", - "DynamicResource", - "AbstractRoute", - "ResourceRoute", - "StaticResource", - "View", -) - - -if TYPE_CHECKING: # pragma: no cover - from .web_app import Application - - BaseDict = Dict[str, str] -else: - BaseDict = dict - -YARL_VERSION = tuple(map(int, yarl_version.split(".")[:2])) - -HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$") -ROUTE_RE = re.compile(r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})") -PATH_SEP = re.escape("/") - - -_WebHandler = Callable[[Request], Awaitable[StreamResponse]] -_ExpectHandler = Callable[[Request], Awaitable[None]] -_Resolve = Tuple[Optional[AbstractMatchInfo], Set[str]] - - -class _InfoDict(TypedDict, total=False): - path: str - - formatter: str - pattern: Pattern[str] - - directory: Path - prefix: str - routes: Mapping[str, "AbstractRoute"] - - app: "Application" - - domain: str - - rule: "AbstractRuleMatching" - - http_exception: HTTPException - - -class AbstractResource(Sized, Iterable["AbstractRoute"]): - def __init__(self, *, name: Optional[str] = None) -> None: - self._name = name - - @property - def name(self) -> Optional[str]: - return self._name - - @property - @abc.abstractmethod - def canonical(self) -> str: - """Exposes the resource's canonical path. - - For example '/foo/bar/{name}' - - """ - - @abc.abstractmethod # pragma: no branch - def url_for(self, **kwargs: str) -> URL: - """Construct url for resource with additional params.""" - - @abc.abstractmethod # pragma: no branch - async def resolve(self, request: Request) -> _Resolve: - """Resolve resource - - Return (UrlMappingMatchInfo, allowed_methods) pair.""" - - @abc.abstractmethod - def add_prefix(self, prefix: str) -> None: - """Add a prefix to processed URLs. - - Required for subapplications support. 
- - """ - - @abc.abstractmethod - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - def freeze(self) -> None: - pass - - @abc.abstractmethod - def raw_match(self, path: str) -> bool: - """Perform a raw match against path""" - - -class AbstractRoute(abc.ABC): - def __init__( - self, - method: str, - handler: Union[_WebHandler, Type[AbstractView]], - *, - expect_handler: Optional[_ExpectHandler] = None, - resource: Optional[AbstractResource] = None, - ) -> None: - - if expect_handler is None: - expect_handler = _default_expect_handler - - assert asyncio.iscoroutinefunction( - expect_handler - ), f"Coroutine is expected, got {expect_handler!r}" - - method = method.upper() - if not HTTP_METHOD_RE.match(method): - raise ValueError(f"{method} is not allowed HTTP method") - - assert callable(handler), handler - if asyncio.iscoroutinefunction(handler): - pass - elif inspect.isgeneratorfunction(handler): - warnings.warn( - "Bare generators are deprecated, " "use @coroutine wrapper", - DeprecationWarning, - ) - elif isinstance(handler, type) and issubclass(handler, AbstractView): - pass - else: - warnings.warn( - "Bare functions are deprecated, " "use async ones", DeprecationWarning - ) - - @wraps(handler) - async def handler_wrapper(request: Request) -> StreamResponse: - result = old_handler(request) - if asyncio.iscoroutine(result): - return await result - return result # type: ignore - - old_handler = handler - handler = handler_wrapper - - self._method = method - self._handler = handler - self._expect_handler = expect_handler - self._resource = resource - - @property - def method(self) -> str: - return self._method - - @property - def handler(self) -> _WebHandler: - return self._handler - - @property - @abc.abstractmethod - def name(self) -> Optional[str]: - """Optional route's name, always equals to resource's name.""" - - @property - def resource(self) -> Optional[AbstractResource]: - return self._resource - - @abc.abstractmethod - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - @abc.abstractmethod # pragma: no branch - def url_for(self, *args: str, **kwargs: str) -> URL: - """Construct url for route with additional params.""" - - async def handle_expect_header(self, request: Request) -> None: - await self._expect_handler(request) - - -class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): - def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): - super().__init__(match_dict) - self._route = route - self._apps = [] # type: List[Application] - self._current_app = None # type: Optional[Application] - self._frozen = False - - @property - def handler(self) -> _WebHandler: - return self._route.handler - - @property - def route(self) -> AbstractRoute: - return self._route - - @property - def expect_handler(self) -> _ExpectHandler: - return self._route.handle_expect_header - - @property - def http_exception(self) -> Optional[HTTPException]: - return None - - def get_info(self) -> _InfoDict: # type: ignore - return self._route.get_info() - - @property - def apps(self) -> Tuple["Application", ...]: - return tuple(self._apps) - - def add_app(self, app: "Application") -> None: - if self._frozen: - raise RuntimeError("Cannot change apps stack after .freeze() call") - if self._current_app is None: - self._current_app = app - self._apps.insert(0, app) - - @property - def current_app(self) -> "Application": - app = self._current_app - assert app is not None - return app - - 
@contextmanager - def set_current_app(self, app: "Application") -> Generator[None, None, None]: - if DEBUG: # pragma: no cover - if app not in self._apps: - raise RuntimeError( - "Expected one of the following apps {!r}, got {!r}".format( - self._apps, app - ) - ) - prev = self._current_app - self._current_app = app - try: - yield - finally: - self._current_app = prev - - def freeze(self) -> None: - self._frozen = True - - def __repr__(self) -> str: - return f"<MatchInfo {super().__repr__()}: {self._route}>" - - -class MatchInfoError(UrlMappingMatchInfo): - def __init__(self, http_exception: HTTPException) -> None: - self._exception = http_exception - super().__init__({}, SystemRoute(self._exception)) - - @property - def http_exception(self) -> HTTPException: - return self._exception - - def __repr__(self) -> str: - return "<MatchInfoError {}: {}>".format( - self._exception.status, self._exception.reason - ) - - -async def _default_expect_handler(request: Request) -> None: - """Default handler for Expect header. - - Just send "100 Continue" to client. - raise HTTPExpectationFailed if value of header is not "100-continue" - """ - expect = request.headers.get(hdrs.EXPECT, "") - if request.version == HttpVersion11: - if expect.lower() == "100-continue": - await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") - else: - raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) - - -class Resource(AbstractResource): - def __init__(self, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - self._routes = [] # type: List[ResourceRoute] - - def add_route( - self, - method: str, - handler: Union[Type[AbstractView], _WebHandler], - *, - expect_handler: Optional[_ExpectHandler] = None, - ) -> "ResourceRoute": - - for route_obj in self._routes: - if route_obj.method == method or route_obj.method == hdrs.METH_ANY: - raise RuntimeError( - "Added route will never be executed, " - "method {route.method} is already " - "registered".format(route=route_obj) - ) - - route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) - self.register_route(route_obj) - return route_obj - - def register_route(self, route: "ResourceRoute") -> None: - assert isinstance( - route, ResourceRoute - ), f"Instance of Route class is required, got {route!r}" - self._routes.append(route) - - async def resolve(self, request: Request) -> _Resolve: - allowed_methods = set() # type: Set[str] - - match_dict = self._match(request.rel_url.raw_path) - if match_dict is None: - return None, allowed_methods - - for route_obj in self._routes: - route_method = route_obj.method - allowed_methods.add(route_method) - - if route_method == request.method or route_method == hdrs.METH_ANY: - return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) - else: - return None, allowed_methods - - @abc.abstractmethod - def _match(self, path: str) -> Optional[Dict[str, str]]: - pass # pragma: no cover - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._routes) - - # TODO: implement all abstract methods - - -class PlainResource(Resource): - def __init__(self, path: str, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - assert not path or path.startswith("/") - self._path = path - - @property - def canonical(self) -> str: - return self._path - - def freeze(self) -> None: - if not self._path: - self._path = "/" - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._path = prefix + self._path
- - def _match(self, path: str) -> Optional[Dict[str, str]]: - # string comparison is about 10 times faster than regexp matching - if self._path == path: - return {} - else: - return None - - def raw_match(self, path: str) -> bool: - return self._path == path - - def get_info(self) -> _InfoDict: - return {"path": self._path} - - def url_for(self) -> URL: # type: ignore - return URL.build(path=self._path, encoded=True) - - def __repr__(self) -> str: - name = "'" + self.name + "' " if self.name is not None else "" - return f"<PlainResource {name} {self._path}>" - - -class DynamicResource(Resource): - - DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}") - DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}") - GOOD = r"[^{}/]+" - - def __init__(self, path: str, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - pattern = "" - formatter = "" - for part in ROUTE_RE.split(path): - match = self.DYN.fullmatch(part) - if match: - pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD) - formatter += "{" + match.group("var") + "}" - continue - - match = self.DYN_WITH_RE.fullmatch(part) - if match: - pattern += "(?P<{var}>{re})".format(**match.groupdict()) - formatter += "{" + match.group("var") + "}" - continue - - if "{" in part or "}" in part: - raise ValueError(f"Invalid path '{path}'['{part}']") - - part = _requote_path(part) - formatter += part - pattern += re.escape(part) - - try: - compiled = re.compile(pattern) - except re.error as exc: - raise ValueError(f"Bad pattern '{pattern}': {exc}") from None - assert compiled.pattern.startswith(PATH_SEP) - assert formatter.startswith("/") - self._pattern = compiled - self._formatter = formatter - - @property - def canonical(self) -> str: - return self._formatter - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern) - self._formatter = prefix + self._formatter - - def _match(self, path: str) -> Optional[Dict[str, str]]: - match = self._pattern.fullmatch(path) - if match is None: - return None - else: - return { - key: _unquote_path(value) for key, value in match.groupdict().items() - } - - def raw_match(self, path: str) -> bool: - return self._formatter == path - - def get_info(self) -> _InfoDict: - return {"formatter": self._formatter, "pattern": self._pattern} - - def url_for(self, **parts: str) -> URL: - url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()}) - return URL.build(path=url, encoded=True) - - def __repr__(self) -> str: - name = "'" + self.name + "' " if self.name is not None else "" - return "<DynamicResource {name} {formatter}>".format( - name=name, formatter=self._formatter - ) - - -class PrefixResource(AbstractResource): - def __init__(self, prefix: str, *, name: Optional[str] = None) -> None: - assert not prefix or prefix.startswith("/"), prefix - assert prefix in ("", "/") or not prefix.endswith("/"), prefix - super().__init__(name=name) - self._prefix = _requote_path(prefix) - - @property - def canonical(self) -> str: - return self._prefix - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._prefix = prefix + self._prefix - - def raw_match(self, prefix: str) -> bool: - return False - - # TODO: impl missing abstract methods - - -class StaticResource(PrefixResource): - VERSION_KEY = "v" - - def __init__( - self, - prefix: str, - directory: PathLike, - *, - name: Optional[str] = 
None, - expect_handler: Optional[_ExpectHandler] = None, - chunk_size: int = 256 * 1024, - show_index: bool = False, - follow_symlinks: bool = False, - append_version: bool = False, - ) -> None: - super().__init__(prefix, name=name) - try: - directory = Path(directory) - if str(directory).startswith("~"): - directory = Path(os.path.expanduser(str(directory))) - directory = directory.resolve() - if not directory.is_dir(): - raise ValueError("Not a directory") - except (FileNotFoundError, ValueError) as error: - raise ValueError(f"No directory exists at '{directory}'") from error - self._directory = directory - self._show_index = show_index - self._chunk_size = chunk_size - self._follow_symlinks = follow_symlinks - self._expect_handler = expect_handler - self._append_version = append_version - - self._routes = { - "GET": ResourceRoute( - "GET", self._handle, self, expect_handler=expect_handler - ), - "HEAD": ResourceRoute( - "HEAD", self._handle, self, expect_handler=expect_handler - ), - } - - def url_for( # type: ignore - self, - *, - filename: Union[str, Path], - append_version: Optional[bool] = None, - ) -> URL: - if append_version is None: - append_version = self._append_version - if isinstance(filename, Path): - filename = str(filename) - filename = filename.lstrip("/") - - url = URL.build(path=self._prefix, encoded=True) - # filename is not encoded - if YARL_VERSION < (1, 6): - url = url / filename.replace("%", "%25") - else: - url = url / filename - - if append_version: - try: - filepath = self._directory.joinpath(filename).resolve() - if not self._follow_symlinks: - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError): - # ValueError for case when path point to symlink - # with follow_symlinks is False - return url # relatively safe - if filepath.is_file(): - # TODO cache file content - # with file watcher for cache invalidation - with filepath.open("rb") as f: - file_bytes = f.read() - h = self._get_file_hash(file_bytes) - url = url.with_query({self.VERSION_KEY: h}) - return url - return url - - @staticmethod - def _get_file_hash(byte_array: bytes) -> str: - m = hashlib.sha256() # todo sha256 can be configurable param - m.update(byte_array) - b64 = base64.urlsafe_b64encode(m.digest()) - return b64.decode("ascii") - - def get_info(self) -> _InfoDict: - return { - "directory": self._directory, - "prefix": self._prefix, - "routes": self._routes, - } - - def set_options_route(self, handler: _WebHandler) -> None: - if "OPTIONS" in self._routes: - raise RuntimeError("OPTIONS route was set already") - self._routes["OPTIONS"] = ResourceRoute( - "OPTIONS", handler, self, expect_handler=self._expect_handler - ) - - async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.raw_path - method = request.method - allowed_methods = set(self._routes) - if not path.startswith(self._prefix): - return None, set() - - if method not in allowed_methods: - return None, allowed_methods - - match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} - return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._routes.values()) - - async def _handle(self, request: Request) -> StreamResponse: - rel_url = request.match_info["filename"] - try: - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is 
totally different - raise HTTPForbidden() - filepath = self._directory.joinpath(filename).resolve() - if not self._follow_symlinks: - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError) as error: - # relatively safe - raise HTTPNotFound() from error - except HTTPForbidden: - raise - except Exception as error: - # perm error or other kind! - request.app.logger.exception(error) - raise HTTPNotFound() from error - - # on opening a dir, load its contents if allowed - if filepath.is_dir(): - if self._show_index: - try: - return Response( - text=self._directory_as_html(filepath), content_type="text/html" - ) - except PermissionError: - raise HTTPForbidden() - else: - raise HTTPForbidden() - elif filepath.is_file(): - return FileResponse(filepath, chunk_size=self._chunk_size) - else: - raise HTTPNotFound - - def _directory_as_html(self, filepath: Path) -> str: - # returns directory's index as html - - # sanity check - assert filepath.is_dir() - - relative_path_to_dir = filepath.relative_to(self._directory).as_posix() - index_of = f"Index of /{relative_path_to_dir}" - h1 = f"

<h1>{index_of}</h1>" - - index_list = [] - dir_index = filepath.iterdir() - for _file in sorted(dir_index): - # show file url as relative to static path - rel_path = _file.relative_to(self._directory).as_posix() - file_url = self._prefix + "/" + rel_path - - # if file is a directory, add '/' to the end of the name - if _file.is_dir(): - file_name = f"{_file.name}/" - else: - file_name = _file.name - - index_list.append( - '<li><a href="{url}">{name}</a></li>'.format( - url=file_url, name=file_name - ) - ) - ul = "<ul>\n{}\n</ul>".format("\n".join(index_list)) - body = f"<body>\n{h1}\n{ul}\n</body>" - - head_str = f"<head>\n<title>{index_of}</title>\n</head>" - html = f"<html>\n{head_str}\n{body}\n</html>" - - return html - - def __repr__(self) -> str: - name = "'" + self.name + "'" if self.name is not None else "" - return "<StaticResource {name} {path} -> {directory!r}>".format( - name=name, path=self._prefix, directory=self._directory - ) - - -class PrefixedSubAppResource(PrefixResource): - def __init__(self, prefix: str, app: "Application") -> None: - super().__init__(prefix) - self._app = app - for resource in app.router.resources(): - resource.add_prefix(prefix) - - def add_prefix(self, prefix: str) -> None: - super().add_prefix(prefix) - for resource in self._app.router.resources(): - resource.add_prefix(prefix) - - def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not supported " "by sub-application root") - - def get_info(self) -> _InfoDict: - return {"app": self._app, "prefix": self._prefix} - - async def resolve(self, request: Request) -> _Resolve: - if ( - not request.url.raw_path.startswith(self._prefix + "/") - and request.url.raw_path != self._prefix - ): - return None, set() - match_info = await self._app.router.resolve(request) - match_info.add_app(self._app) - if isinstance(match_info.http_exception, HTTPMethodNotAllowed): - methods = match_info.http_exception.allowed_methods - else: - methods = set() - return match_info, methods - - def __len__(self) -> int: - return len(self._app.router.routes()) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._app.router.routes()) - - def __repr__(self) -> str: - return "<PrefixedSubAppResource {prefix} -> {app!r}>".format( - prefix=self._prefix, app=self._app - ) - - -class AbstractRuleMatching(abc.ABC): - @abc.abstractmethod # pragma: no branch - async def match(self, request: Request) -> bool: - """Return bool if the request satisfies the criteria""" - - @abc.abstractmethod # pragma: no branch - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - @property - @abc.abstractmethod # pragma: no branch - def canonical(self) -> str: - """Return a str""" - - -class Domain(AbstractRuleMatching): - re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)") - - def __init__(self, domain: str) -> None: - super().__init__() - self._domain = self.validation(domain) - - @property - def canonical(self) -> str: - return self._domain - - def validation(self, domain: str) -> str: - if not isinstance(domain, str): - raise TypeError("Domain must be str") - domain = domain.rstrip(".").lower() - if not domain: - raise ValueError("Domain cannot be empty") - elif "://" in domain: - raise ValueError("Scheme not supported") - url = URL("http://" + domain) - assert url.raw_host is not None - if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")): - raise ValueError("Domain not valid") - if url.port == 80: - return url.raw_host - return f"{url.raw_host}:{url.port}" - - async def match(self, request: Request) -> bool: - host = request.headers.get(hdrs.HOST) - if not host: - return False - return self.match_domain(host) - - def match_domain(self, host: str) -> bool: - return host.lower() == self._domain - - def get_info(self) -> _InfoDict: - return {"domain": self._domain} - - -class MaskDomain(Domain): - re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)") - - def __init__(self, domain: str) -> 
None: - super().__init__(domain) - mask = self._domain.replace(".", r"\.").replace("*", ".*") - self._mask = re.compile(mask) - - @property - def canonical(self) -> str: - return self._mask.pattern - - def match_domain(self, host: str) -> bool: - return self._mask.fullmatch(host) is not None - - -class MatchedSubAppResource(PrefixedSubAppResource): - def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None: - AbstractResource.__init__(self) - self._prefix = "" - self._app = app - self._rule = rule - - @property - def canonical(self) -> str: - return self._rule.canonical - - def get_info(self) -> _InfoDict: - return {"app": self._app, "rule": self._rule} - - async def resolve(self, request: Request) -> _Resolve: - if not await self._rule.match(request): - return None, set() - match_info = await self._app.router.resolve(request) - match_info.add_app(self._app) - if isinstance(match_info.http_exception, HTTPMethodNotAllowed): - methods = match_info.http_exception.allowed_methods - else: - methods = set() - return match_info, methods - - def __repr__(self) -> str: - return "<MatchedSubAppResource -> {app!r}>" "".format(app=self._app) - - -class ResourceRoute(AbstractRoute): - """A route with resource""" - - def __init__( - self, - method: str, - handler: Union[_WebHandler, Type[AbstractView]], - resource: AbstractResource, - *, - expect_handler: Optional[_ExpectHandler] = None, - ) -> None: - super().__init__( - method, handler, expect_handler=expect_handler, resource=resource - ) - - def __repr__(self) -> str: - return "<ResourceRoute [{method}] {resource} -> {handler!r}".format( - method=self.method, resource=self._resource, handler=self.handler - ) - - @property - def name(self) -> Optional[str]: - if self._resource is None: - return None - return self._resource.name - - def url_for(self, *args: str, **kwargs: str) -> URL: - """Construct url for route with additional params.""" - assert self._resource is not None - return self._resource.url_for(*args, **kwargs) - - def get_info(self) -> _InfoDict: - assert self._resource is not None - return self._resource.get_info() - - -class SystemRoute(AbstractRoute): - def __init__(self, http_exception: HTTPException) -> None: - super().__init__(hdrs.METH_ANY, self._handle) - self._http_exception = http_exception - - def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not allowed for SystemRoute") - - @property - def name(self) -> Optional[str]: - return None - - def get_info(self) -> _InfoDict: - return {"http_exception": self._http_exception} - - async def _handle(self, request: Request) -> StreamResponse: - raise self._http_exception - - @property - def status(self) -> int: - return self._http_exception.status - - @property - def reason(self) -> str: - return self._http_exception.reason - - def __repr__(self) -> str: - return "<SystemRoute {self.status}: {self.reason}>".format(self=self) - - -class View(AbstractView): - async def _iter(self) -> StreamResponse: - if self.request.method not in hdrs.METH_ALL: - self._raise_allowed_methods() - method = getattr(self, self.request.method.lower(), None) - if method is None: - self._raise_allowed_methods() - resp = await method() - return resp - - def __await__(self) -> Generator[Any, None, StreamResponse]: - return self._iter().__await__() - - def _raise_allowed_methods(self) -> None: - allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())} - raise HTTPMethodNotAllowed(self.request.method, allowed_methods) - - -class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]): - def __init__(self, resources: 
List[AbstractResource]) -> None: - self._resources = resources - - def __len__(self) -> int: - return len(self._resources) - - def __iter__(self) -> Iterator[AbstractResource]: - yield from self._resources - - def __contains__(self, resource: object) -> bool: - return resource in self._resources - - -class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): - def __init__(self, resources: List[AbstractResource]): - self._routes = [] # type: List[AbstractRoute] - for resource in resources: - for route in resource: - self._routes.append(route) - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - yield from self._routes - - def __contains__(self, route: object) -> bool: - return route in self._routes - - -class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): - - NAME_SPLIT_RE = re.compile(r"[.:-]") - - def __init__(self) -> None: - super().__init__() - self._resources = [] # type: List[AbstractResource] - self._named_resources = {} # type: Dict[str, AbstractResource] - - async def resolve(self, request: Request) -> AbstractMatchInfo: - method = request.method - allowed_methods = set() # type: Set[str] - - for resource in self._resources: - match_dict, allowed = await resource.resolve(request) - if match_dict is not None: - return match_dict - else: - allowed_methods |= allowed - else: - if allowed_methods: - return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) - else: - return MatchInfoError(HTTPNotFound()) - - def __iter__(self) -> Iterator[str]: - return iter(self._named_resources) - - def __len__(self) -> int: - return len(self._named_resources) - - def __contains__(self, resource: object) -> bool: - return resource in self._named_resources - - def __getitem__(self, name: str) -> AbstractResource: - return self._named_resources[name] - - def resources(self) -> ResourcesView: - return ResourcesView(self._resources) - - def routes(self) -> RoutesView: - return RoutesView(self._resources) - - def named_resources(self) -> Mapping[str, AbstractResource]: - return MappingProxyType(self._named_resources) - - def register_resource(self, resource: AbstractResource) -> None: - assert isinstance( - resource, AbstractResource - ), f"Instance of AbstractResource class is required, got {resource!r}" - if self.frozen: - raise RuntimeError("Cannot register a resource into frozen router.") - - name = resource.name - - if name is not None: - parts = self.NAME_SPLIT_RE.split(name) - for part in parts: - if keyword.iskeyword(part): - raise ValueError( - f"Incorrect route name {name!r}, " - "python keywords cannot be used " - "for route name" - ) - if not part.isidentifier(): - raise ValueError( - "Incorrect route name {!r}, " - "the name should be a sequence of " - "python identifiers separated " - "by dash, dot or column".format(name) - ) - if name in self._named_resources: - raise ValueError( - "Duplicate {!r}, " - "already handled by {!r}".format(name, self._named_resources[name]) - ) - self._named_resources[name] = resource - self._resources.append(resource) - - def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: - if path and not path.startswith("/"): - raise ValueError("path should be started with / or be empty") - # Reuse last added resource if path and name are the same - if self._resources: - resource = self._resources[-1] - if resource.name == name and resource.raw_match(path): - return cast(Resource, resource) - if not ("{" in path or "}" in path or 
ROUTE_RE.search(path)): - resource = PlainResource(_requote_path(path), name=name) - self.register_resource(resource) - return resource - resource = DynamicResource(path, name=name) - self.register_resource(resource) - return resource - - def add_route( - self, - method: str, - path: str, - handler: Union[_WebHandler, Type[AbstractView]], - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - ) -> AbstractRoute: - resource = self.add_resource(path, name=name) - return resource.add_route(method, handler, expect_handler=expect_handler) - - def add_static( - self, - prefix: str, - path: PathLike, - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - chunk_size: int = 256 * 1024, - show_index: bool = False, - follow_symlinks: bool = False, - append_version: bool = False, - ) -> AbstractResource: - """Add static files view. - - prefix - url prefix - path - folder with files - - """ - assert prefix.startswith("/") - if prefix.endswith("/"): - prefix = prefix[:-1] - resource = StaticResource( - prefix, - path, - name=name, - expect_handler=expect_handler, - chunk_size=chunk_size, - show_index=show_index, - follow_symlinks=follow_symlinks, - append_version=append_version, - ) - self.register_resource(resource) - return resource - - def add_head(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: - """ - Shortcut for add_route with method HEAD - """ - return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) - - def add_options( - self, path: str, handler: _WebHandler, **kwargs: Any - ) -> AbstractRoute: - """ - Shortcut for add_route with method OPTIONS - """ - return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) - - def add_get( - self, - path: str, - handler: _WebHandler, - *, - name: Optional[str] = None, - allow_head: bool = True, - **kwargs: Any, - ) -> AbstractRoute: - """ - Shortcut for add_route with method GET, if allow_head is true another - route is added allowing head requests to the same endpoint - """ - resource = self.add_resource(path, name=name) - if allow_head: - resource.add_route(hdrs.METH_HEAD, handler, **kwargs) - return resource.add_route(hdrs.METH_GET, handler, **kwargs) - - def add_post(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: - """ - Shortcut for add_route with method POST - """ - return self.add_route(hdrs.METH_POST, path, handler, **kwargs) - - def add_put(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: - """ - Shortcut for add_route with method PUT - """ - return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) - - def add_patch( - self, path: str, handler: _WebHandler, **kwargs: Any - ) -> AbstractRoute: - """ - Shortcut for add_route with method PATCH - """ - return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) - - def add_delete( - self, path: str, handler: _WebHandler, **kwargs: Any - ) -> AbstractRoute: - """ - Shortcut for add_route with method DELETE - """ - return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) - - def add_view( - self, path: str, handler: Type[AbstractView], **kwargs: Any - ) -> AbstractRoute: - """ - Shortcut for add_route with ANY methods for a class-based view - """ - return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) - - def freeze(self) -> None: - super().freeze() - for resource in self._resources: - resource.freeze() - - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: - """Append routes to route table. 
- - Parameter should be a sequence of RouteDef objects. - - Returns a list of registered AbstractRoute instances. - """ - registered_routes = [] - for route_def in routes: - registered_routes.extend(route_def.register(self)) - return registered_routes - - -def _quote_path(value: str) -> str: - if YARL_VERSION < (1, 6): - value = value.replace("%", "%25") - return URL.build(path=value, encoded=False).raw_path - - -def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path - - -def _requote_path(value: str) -> str: - # Quote non-ascii characters and other characters which must be quoted, - # but preserve existing %-sequences. - result = _quote_path(value) - if "%" in value: - result = result.replace("%25", "%") - return result diff --git a/third_party/python/aiohttp/aiohttp/web_ws.py b/third_party/python/aiohttp/aiohttp/web_ws.py deleted file mode 100644 index da7ce6df1c59..000000000000 --- a/third_party/python/aiohttp/aiohttp/web_ws.py +++ /dev/null @@ -1,481 +0,0 @@ -import asyncio -import base64 -import binascii -import hashlib -import json -from typing import Any, Iterable, Optional, Tuple - -import async_timeout -import attr -from multidict import CIMultiDict - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import call_later, set_result -from .http import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WS_KEY, - WebSocketError, - WebSocketReader, - WebSocketWriter, - WSMessage, - WSMsgType as WSMsgType, - ws_ext_gen, - ws_ext_parse, -) -from .log import ws_logger -from .streams import EofStream, FlowControlDataQueue -from .typedefs import JSONDecoder, JSONEncoder -from .web_exceptions import HTTPBadRequest, HTTPException -from .web_request import BaseRequest -from .web_response import StreamResponse - -__all__ = ( - "WebSocketResponse", - "WebSocketReady", - "WSMsgType", -) - -THRESHOLD_CONNLOST_ACCESS = 5 - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class WebSocketReady: - ok: bool - protocol: Optional[str] - - def __bool__(self) -> bool: - return self.ok - - -class WebSocketResponse(StreamResponse): - - _length_check = False - - def __init__( - self, - *, - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - protocols: Iterable[str] = (), - compress: bool = True, - max_msg_size: int = 4 * 1024 * 1024, - ) -> None: - super().__init__(status=101) - self._protocols = protocols - self._ws_protocol = None # type: Optional[str] - self._writer = None # type: Optional[WebSocketWriter] - self._reader = None # type: Optional[FlowControlDataQueue[WSMessage]] - self._closed = False - self._closing = False - self._conn_lost = 0 - self._close_code = None # type: Optional[int] - self._loop = None # type: Optional[asyncio.AbstractEventLoop] - self._waiting = None # type: Optional[asyncio.Future[bool]] - self._exception = None # type: Optional[BaseException] - self._timeout = timeout - self._receive_timeout = receive_timeout - self._autoclose = autoclose - self._autoping = autoping - self._heartbeat = heartbeat - self._heartbeat_cb = None - if heartbeat is not None: - self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb = None - self._compress = compress - self._max_msg_size = max_msg_size - - def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - - if self._heartbeat_cb is not None: - self._heartbeat_cb.cancel() - 
self._heartbeat_cb = None - - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() - - if self._heartbeat is not None: - self._heartbeat_cb = call_later( - self._send_heartbeat, self._heartbeat, self._loop - ) - - def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) # type: ignore - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, self._pong_heartbeat, self._loop - ) - - def _pong_not_received(self) -> None: - if self._req is not None and self._req.transport is not None: - self._closed = True - self._close_code = 1006 - self._exception = asyncio.TimeoutError() - self._req.transport.close() - - async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: - # make pre-check to don't hide it by do_handshake() exceptions - if self._payload_writer is not None: - return self._payload_writer - - protocol, writer = self._pre_start(request) - payload_writer = await super().prepare(request) - assert payload_writer is not None - self._post_start(request, protocol, writer) - await payload_writer.drain() - return payload_writer - - def _handshake( - self, request: BaseRequest - ) -> Tuple["CIMultiDict[str]", str, bool, bool]: - headers = request.headers - if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): - raise HTTPBadRequest( - text=( - "No WebSocket UPGRADE hdr: {}\n Can " - '"Upgrade" only to "WebSocket".' - ).format(headers.get(hdrs.UPGRADE)) - ) - - if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): - raise HTTPBadRequest( - text="No CONNECTION upgrade hdr: {}".format( - headers.get(hdrs.CONNECTION) - ) - ) - - # find common sub-protocol between client and server - protocol = None - if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: - req_protocols = [ - str(proto.strip()) - for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") - ] - - for proto in req_protocols: - if proto in self._protocols: - protocol = proto - break - else: - # No overlap found: Return no protocol as per spec - ws_logger.warning( - "Client protocols %r don’t overlap server-known ones %r", - req_protocols, - self._protocols, - ) - - # check supported version - version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") - if version not in ("13", "8", "7"): - raise HTTPBadRequest(text=f"Unsupported version: {version}") - - # check client handshake for validity - key = headers.get(hdrs.SEC_WEBSOCKET_KEY) - try: - if not key or len(base64.b64decode(key)) != 16: - raise HTTPBadRequest(text=f"Handshake error: {key!r}") - except binascii.Error: - raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None - - accept_val = base64.b64encode( - hashlib.sha1(key.encode() + WS_KEY).digest() - ).decode() - response_headers = CIMultiDict( # type: ignore - { - hdrs.UPGRADE: "websocket", # type: ignore - hdrs.CONNECTION: "upgrade", - hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, - } - ) - - notakeover = False - compress = 0 - if self._compress: - extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) - # Server side always get return with no exception. 
- # If something happened, just drop compress extension - compress, notakeover = ws_ext_parse(extensions, isserver=True) - if compress: - enabledext = ws_ext_gen( - compress=compress, isserver=True, server_notakeover=notakeover - ) - response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext - - if protocol: - response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol - return (response_headers, protocol, compress, notakeover) # type: ignore - - def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: - self._loop = request._loop - - headers, protocol, compress, notakeover = self._handshake(request) - - self.set_status(101) - self.headers.update(headers) - self.force_close() - self._compress = compress - transport = request._protocol.transport - assert transport is not None - writer = WebSocketWriter( - request._protocol, transport, compress=compress, notakeover=notakeover - ) - - return protocol, writer - - def _post_start( - self, request: BaseRequest, protocol: str, writer: WebSocketWriter - ) -> None: - self._ws_protocol = protocol - self._writer = writer - - self._reset_heartbeat() - - loop = self._loop - assert loop is not None - self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop) - request.protocol.set_parser( - WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) - ) - # disable HTTP keepalive for WebSocket - request.protocol.keep_alive(False) - - def can_prepare(self, request: BaseRequest) -> WebSocketReady: - if self._writer is not None: - raise RuntimeError("Already started") - try: - _, protocol, _, _ = self._handshake(request) - except HTTPException: - return WebSocketReady(False, None) - else: - return WebSocketReady(True, protocol) - - @property - def closed(self) -> bool: - return self._closed - - @property - def close_code(self) -> Optional[int]: - return self._close_code - - @property - def ws_protocol(self) -> Optional[str]: - return self._ws_protocol - - @property - def compress(self) -> bool: - return self._compress - - def exception(self) -> Optional[BaseException]: - return self._exception - - async def ping(self, message: bytes = b"") -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.ping(message) - - async def pong(self, message: bytes = b"") -> None: - # unsolicited pong - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.pong(message) - - async def send_str(self, data: str, compress: Optional[bool] = None) -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - if not isinstance(data, str): - raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send(data, binary=False, compress=compress) - - async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send(data, binary=True, compress=compress) - - async def send_json( - self, - data: Any, - compress: Optional[bool] = None, - *, - dumps: JSONEncoder = json.dumps, - ) -> None: - await self.send_str(dumps(data), compress=compress) - - async def write_eof(self) -> None: # type: ignore - if self._eof_sent: - return - if self._payload_writer is None: - raise RuntimeError("Response has not been started") - - await self.close() - self._eof_sent = True - - async def 
close(self, *, code: int = 1000, message: bytes = b"") -> bool: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - - self._cancel_heartbeat() - reader = self._reader - assert reader is not None - - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting is not None and not self._closed: - reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting - - if not self._closed: - self._closed = True - try: - await self._writer.close(code, message) - writer = self._payload_writer - assert writer is not None - await writer.drain() - except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = 1006 - raise - except Exception as exc: - self._close_code = 1006 - self._exception = exc - return True - - if self._closing: - return True - - reader = self._reader - assert reader is not None - try: - with async_timeout.timeout(self._timeout, loop=self._loop): - msg = await reader.read() - except asyncio.CancelledError: - self._close_code = 1006 - raise - except Exception as exc: - self._close_code = 1006 - self._exception = exc - return True - - if msg.type == WSMsgType.CLOSE: - self._close_code = msg.data - return True - - self._close_code = 1006 - self._exception = asyncio.TimeoutError() - return True - else: - return False - - async def receive(self, timeout: Optional[float] = None) -> WSMessage: - if self._reader is None: - raise RuntimeError("Call .prepare() first") - - loop = self._loop - assert loop is not None - while True: - if self._waiting is not None: - raise RuntimeError("Concurrent call to receive() is not allowed") - - if self._closed: - self._conn_lost += 1 - if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: - raise RuntimeError("WebSocket connection is closed.") - return WS_CLOSED_MESSAGE - elif self._closing: - return WS_CLOSING_MESSAGE - - try: - self._waiting = loop.create_future() - try: - with async_timeout.timeout( - timeout or self._receive_timeout, loop=self._loop - ): - msg = await self._reader.read() - self._reset_heartbeat() - finally: - waiter = self._waiting - set_result(waiter, True) - self._waiting = None - except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = 1006 - raise - except EofStream: - self._close_code = 1000 - await self.close() - return WSMessage(WSMsgType.CLOSED, None, None) - except WebSocketError as exc: - self._close_code = exc.code - await self.close(code=exc.code) - return WSMessage(WSMsgType.ERROR, exc, None) - except Exception as exc: - self._exception = exc - self._closing = True - self._close_code = 1006 - await self.close() - return WSMessage(WSMsgType.ERROR, exc, None) - - if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data - if not self._closed and self._autoclose: - await self.close() - elif msg.type == WSMsgType.CLOSING: - self._closing = True - elif msg.type == WSMsgType.PING and self._autoping: - await self.pong(msg.data) - continue - elif msg.type == WSMsgType.PONG and self._autoping: - continue - - return msg - - async def receive_str(self, *, timeout: Optional[float] = None) -> str: - msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: - raise TypeError( - "Received message {}:{!r} is not WSMsgType.TEXT".format( - msg.type, msg.data - ) - ) - return msg.data - - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: - msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") - 
return msg.data - - async def receive_json( - self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None - ) -> Any: - data = await self.receive_str(timeout=timeout) - return loads(data) - - async def write(self, data: bytes) -> None: - raise RuntimeError("Cannot call .write() for websocket") - - def __aiter__(self) -> "WebSocketResponse": - return self - - async def __anext__(self) -> WSMessage: - msg = await self.receive() - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): - raise StopAsyncIteration - return msg - - def _cancel(self, exc: BaseException) -> None: - if self._reader is not None: - self._reader.set_exception(exc) diff --git a/third_party/python/aiohttp/aiohttp/worker.py b/third_party/python/aiohttp/aiohttp/worker.py deleted file mode 100644 index 67b244bbd35a..000000000000 --- a/third_party/python/aiohttp/aiohttp/worker.py +++ /dev/null @@ -1,252 +0,0 @@ -"""Async gunicorn worker for aiohttp.web""" - -import asyncio -import os -import re -import signal -import sys -from types import FrameType -from typing import Any, Awaitable, Callable, Optional, Union # noqa - -from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat -from gunicorn.workers import base - -from aiohttp import web - -from .helpers import set_result -from .web_app import Application -from .web_log import AccessLogger - -try: - import ssl - - SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore - SSLContext = object # type: ignore - - -__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker") - - -class GunicornWebWorker(base.Worker): - - DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT - DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default - - def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover - super().__init__(*args, **kw) - - self._task = None # type: Optional[asyncio.Task[None]] - self.exit_code = 0 - self._notify_waiter = None # type: Optional[asyncio.Future[bool]] - - def init_process(self) -> None: - # create new event_loop after fork - asyncio.get_event_loop().close() - - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - super().init_process() - - def run(self) -> None: - self._task = self.loop.create_task(self._run()) - - try: # ignore all finalization problems - self.loop.run_until_complete(self._task) - except Exception: - self.log.exception("Exception in gunicorn worker") - if sys.version_info >= (3, 6): - self.loop.run_until_complete(self.loop.shutdown_asyncgens()) - self.loop.close() - - sys.exit(self.exit_code) - - async def _run(self) -> None: - if isinstance(self.wsgi, Application): - app = self.wsgi - elif asyncio.iscoroutinefunction(self.wsgi): - app = await self.wsgi() - else: - raise RuntimeError( - "wsgi app should be either Application or " - "async function returning Application, got {}".format(self.wsgi) - ) - access_log = self.log.access_log if self.cfg.accesslog else None - runner = web.AppRunner( - app, - logger=self.log, - keepalive_timeout=self.cfg.keepalive, - access_log=access_log, - access_log_format=self._get_valid_log_format(self.cfg.access_log_format), - ) - await runner.setup() - - ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None - - runner = runner - assert runner is not None - server = runner.server - assert server is not None - for sock in self.sockets: - site = web.SockSite( - runner, - sock, - ssl_context=ctx, - shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, - ) - 
await site.start() - - # If our parent changed then we shut down. - pid = os.getpid() - try: - while self.alive: # type: ignore - self.notify() - - cnt = server.requests_count - if self.cfg.max_requests and cnt > self.cfg.max_requests: - self.alive = False - self.log.info("Max requests, shutting down: %s", self) - - elif pid == os.getpid() and self.ppid != os.getppid(): - self.alive = False - self.log.info("Parent changed, shutting down: %s", self) - else: - await self._wait_next_notify() - except BaseException: - pass - - await runner.cleanup() - - def _wait_next_notify(self) -> "asyncio.Future[bool]": - self._notify_waiter_done() - - loop = self.loop - assert loop is not None - self._notify_waiter = waiter = loop.create_future() - self.loop.call_later(1.0, self._notify_waiter_done, waiter) - - return waiter - - def _notify_waiter_done( - self, waiter: Optional["asyncio.Future[bool]"] = None - ) -> None: - if waiter is None: - waiter = self._notify_waiter - if waiter is not None: - set_result(waiter, True) - - if waiter is self._notify_waiter: - self._notify_waiter = None - - def init_signals(self) -> None: - # Set up signals through the event loop API. - - self.loop.add_signal_handler( - signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None - ) - - self.loop.add_signal_handler( - signal.SIGTERM, self.handle_exit, signal.SIGTERM, None - ) - - self.loop.add_signal_handler( - signal.SIGINT, self.handle_quit, signal.SIGINT, None - ) - - self.loop.add_signal_handler( - signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None - ) - - self.loop.add_signal_handler( - signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None - ) - - self.loop.add_signal_handler( - signal.SIGABRT, self.handle_abort, signal.SIGABRT, None - ) - - # Don't let SIGTERM and SIGUSR1 disturb active requests - # by interrupting system calls - signal.siginterrupt(signal.SIGTERM, False) - signal.siginterrupt(signal.SIGUSR1, False) - - def handle_quit(self, sig: int, frame: FrameType) -> None: - self.alive = False - - # worker_int callback - self.cfg.worker_int(self) - - # wakeup closing process - self._notify_waiter_done() - - def handle_abort(self, sig: int, frame: FrameType) -> None: - self.alive = False - self.exit_code = 1 - self.cfg.worker_abort(self) - sys.exit(1) - - @staticmethod - def _create_ssl_context(cfg: Any) -> "SSLContext": - """Creates SSLContext instance for usage in asyncio.create_server. - - See ssl.SSLSocket.__init__ for more details. - """ - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - - ctx = ssl.SSLContext(cfg.ssl_version) - ctx.load_cert_chain(cfg.certfile, cfg.keyfile) - ctx.verify_mode = cfg.cert_reqs - if cfg.ca_certs: - ctx.load_verify_locations(cfg.ca_certs) - if cfg.ciphers: - ctx.set_ciphers(cfg.ciphers) - return ctx - - def _get_valid_log_format(self, source_format: str) -> str: - if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: - return self.DEFAULT_AIOHTTP_LOG_FORMAT - elif re.search(r"%\([^\)]+\)", source_format): - raise ValueError( - "Gunicorn's style options in form of `%(name)s` are not " - "supported for the log formatting. Please use aiohttp's " - "format specification to configure access log formatting: " - "http://docs.aiohttp.org/en/stable/logging.html" - "#format-specification" - ) - else: - return source_format - - -class GunicornUVLoopWebWorker(GunicornWebWorker): - def init_process(self) -> None: - import uvloop - - # Close any existing event loop before setting a - # new policy. 
- asyncio.get_event_loop().close() - - # Setup uvloop policy, so that every - # asyncio.get_event_loop() will create an instance - # of uvloop event loop. - asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) - - super().init_process() - - -class GunicornTokioWebWorker(GunicornWebWorker): - def init_process(self) -> None: # pragma: no cover - import tokio - - # Close any existing event loop before setting a - # new policy. - asyncio.get_event_loop().close() - - # Setup tokio policy, so that every - # asyncio.get_event_loop() will create an instance - # of tokio event loop. - asyncio.set_event_loop_policy(tokio.EventLoopPolicy()) - - super().init_process() diff --git a/third_party/python/aiohttp/examples/background_tasks.py b/third_party/python/aiohttp/examples/background_tasks.py deleted file mode 100755 index 2a1ec12afae0..000000000000 --- a/third_party/python/aiohttp/examples/background_tasks.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Example of aiohttp.web.Application.on_startup signal handler""" -import asyncio - -import aioredis - -from aiohttp import web - - -async def websocket_handler(request): - ws = web.WebSocketResponse() - await ws.prepare(request) - request.app["websockets"].append(ws) - try: - async for msg in ws: - print(msg) - await asyncio.sleep(1) - finally: - request.app["websockets"].remove(ws) - return ws - - -async def on_shutdown(app): - for ws in app["websockets"]: - await ws.close(code=999, message="Server shutdown") - - -async def listen_to_redis(app): - try: - sub = await aioredis.create_redis(("localhost", 6379), loop=app.loop) - ch, *_ = await sub.subscribe("news") - async for msg in ch.iter(encoding="utf-8"): - # Forward message to all connected websockets: - for ws in app["websockets"]: - await ws.send_str(f"{ch.name}: {msg}") - print(f"message in {ch.name}: {msg}") - except asyncio.CancelledError: - pass - finally: - print("Cancel Redis listener: close connection...") - await sub.unsubscribe(ch.name) - await sub.quit() - print("Redis connection closed.") - - -async def start_background_tasks(app): - app["redis_listener"] = app.loop.create_task(listen_to_redis(app)) - - -async def cleanup_background_tasks(app): - print("cleanup background tasks...") - app["redis_listener"].cancel() - await app["redis_listener"] - - -def init(): - app = web.Application() - app["websockets"] = [] - app.router.add_get("/news", websocket_handler) - app.on_startup.append(start_background_tasks) - app.on_cleanup.append(cleanup_background_tasks) - app.on_shutdown.append(on_shutdown) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/cli_app.py b/third_party/python/aiohttp/examples/cli_app.py deleted file mode 100755 index 9fbd3b76049c..000000000000 --- a/third_party/python/aiohttp/examples/cli_app.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python3 -""" -Example of serving an Application using the `aiohttp.web` CLI. - -Serve this app using:: - - $ python -m aiohttp.web -H localhost -P 8080 --repeat 10 cli_app:init \ - > "Hello World" - -Here ``--repeat`` & ``"Hello World"`` are application specific command-line -arguments. `aiohttp.web` only parses & consumes the command-line arguments it -needs (i.e. ``-H``, ``-P`` & ``entry-func``) and passes on any additional -arguments to the `cli_app:init` function for processing. 
-""" - -from argparse import ArgumentParser - -from aiohttp import web - - -def display_message(req): - args = req.app["args"] - text = "\n".join([args.message] * args.repeat) - return web.Response(text=text) - - -def init(argv): - arg_parser = ArgumentParser( - prog="aiohttp.web ...", description="Application CLI", add_help=False - ) - - # Positional argument - arg_parser.add_argument("message", help="message to print") - - # Optional argument - arg_parser.add_argument( - "--repeat", help="number of times to repeat message", type=int, default="1" - ) - - # Avoid conflict with -h from `aiohttp.web` CLI parser - arg_parser.add_argument( - "--app-help", help="show this message and exit", action="help" - ) - - args = arg_parser.parse_args(argv) - - app = web.Application() - app["args"] = args - app.router.add_get("/", display_message) - - return app diff --git a/third_party/python/aiohttp/examples/client_auth.py b/third_party/python/aiohttp/examples/client_auth.py deleted file mode 100755 index 6513de20e5c8..000000000000 --- a/third_party/python/aiohttp/examples/client_auth.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python3 -import asyncio - -import aiohttp - - -async def fetch(session): - print("Query http://httpbin.org/basic-auth/andrew/password") - async with session.get("http://httpbin.org/basic-auth/andrew/password") as resp: - print(resp.status) - body = await resp.text() - print(body) - - -async def go(loop): - async with aiohttp.ClientSession( - auth=aiohttp.BasicAuth("andrew", "password"), loop=loop - ) as session: - await fetch(session) - - -loop = asyncio.get_event_loop() -loop.run_until_complete(go(loop)) diff --git a/third_party/python/aiohttp/examples/client_json.py b/third_party/python/aiohttp/examples/client_json.py deleted file mode 100755 index e54edeaddb69..000000000000 --- a/third_party/python/aiohttp/examples/client_json.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python3 -import asyncio - -import aiohttp - - -async def fetch(session): - print("Query http://httpbin.org/get") - async with session.get("http://httpbin.org/get") as resp: - print(resp.status) - data = await resp.json() - print(data) - - -async def go(loop): - async with aiohttp.ClientSession(loop=loop) as session: - await fetch(session) - - -loop = asyncio.get_event_loop() -loop.run_until_complete(go(loop)) -loop.close() diff --git a/third_party/python/aiohttp/examples/client_ws.py b/third_party/python/aiohttp/examples/client_ws.py deleted file mode 100755 index ec48eccc9ad3..000000000000 --- a/third_party/python/aiohttp/examples/client_ws.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python3 -"""websocket cmd client for wssrv.py example.""" -import argparse -import asyncio -import signal -import sys - -import aiohttp - - -async def start_client(loop, url): - name = input("Please enter your name: ") - - # input reader - def stdin_callback(): - line = sys.stdin.buffer.readline().decode("utf-8") - if not line: - loop.stop() - else: - ws.send_str(name + ": " + line) - - loop.add_reader(sys.stdin.fileno(), stdin_callback) - - async def dispatch(): - while True: - msg = await ws.receive() - - if msg.type == aiohttp.WSMsgType.TEXT: - print("Text: ", msg.data.strip()) - elif msg.type == aiohttp.WSMsgType.BINARY: - print("Binary: ", msg.data) - elif msg.type == aiohttp.WSMsgType.PING: - ws.pong() - elif msg.type == aiohttp.WSMsgType.PONG: - print("Pong received") - else: - if msg.type == aiohttp.WSMsgType.CLOSE: - await ws.close() - elif msg.type == aiohttp.WSMsgType.ERROR: - print("Error during 
receive %s" % ws.exception()) - elif msg.type == aiohttp.WSMsgType.CLOSED: - pass - - break - - # send request - async with aiohttp.ws_connect(url, autoclose=False, autoping=False) as ws: - await dispatch() - - -ARGS = argparse.ArgumentParser( - description="websocket console client for wssrv.py example." -) -ARGS.add_argument( - "--host", action="store", dest="host", default="127.0.0.1", help="Host name" -) -ARGS.add_argument( - "--port", action="store", dest="port", default=8080, type=int, help="Port number" -) - -if __name__ == "__main__": - args = ARGS.parse_args() - if ":" in args.host: - args.host, port = args.host.split(":", 1) - args.port = int(port) - - url = f"http://{args.host}:{args.port}" - - loop = asyncio.get_event_loop() - - loop.add_signal_handler(signal.SIGINT, loop.stop) - loop.create_task(start_client(loop, url)) - loop.run_forever() diff --git a/third_party/python/aiohttp/examples/curl.py b/third_party/python/aiohttp/examples/curl.py deleted file mode 100755 index a39639af34e4..000000000000 --- a/third_party/python/aiohttp/examples/curl.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import asyncio - -import aiohttp - - -async def curl(url): - async with aiohttp.ClientSession() as session: - async with session.request("GET", url) as response: - print(repr(response)) - chunk = await response.content.read() - print("Downloaded: %s" % len(chunk)) - - -if __name__ == "__main__": - ARGS = argparse.ArgumentParser(description="GET url example") - ARGS.add_argument("url", nargs=1, metavar="URL", help="URL to download") - ARGS.add_argument( - "--iocp", - default=False, - action="store_true", - help="Use ProactorEventLoop on Windows", - ) - options = ARGS.parse_args() - - if options.iocp: - from asyncio import events, windows_events - - el = windows_events.ProactorEventLoop() - events.set_event_loop(el) - - loop = asyncio.get_event_loop() - loop.run_until_complete(curl(options.url[0])) diff --git a/third_party/python/aiohttp/examples/fake_server.py b/third_party/python/aiohttp/examples/fake_server.py deleted file mode 100755 index 007d96ba0276..000000000000 --- a/third_party/python/aiohttp/examples/fake_server.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python3 -import asyncio -import pathlib -import socket -import ssl - -import aiohttp -from aiohttp import web -from aiohttp.resolver import DefaultResolver -from aiohttp.test_utils import unused_port - - -class FakeResolver: - _LOCAL_HOST = {0: "127.0.0.1", socket.AF_INET: "127.0.0.1", socket.AF_INET6: "::1"} - - def __init__(self, fakes, *, loop): - """fakes -- dns -> port dict""" - self._fakes = fakes - self._resolver = DefaultResolver(loop=loop) - - async def resolve(self, host, port=0, family=socket.AF_INET): - fake_port = self._fakes.get(host) - if fake_port is not None: - return [ - { - "hostname": host, - "host": self._LOCAL_HOST[family], - "port": fake_port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST, - } - ] - else: - return await self._resolver.resolve(host, port, family) - - -class FakeFacebook: - def __init__(self, *, loop): - self.loop = loop - self.app = web.Application(loop=loop) - self.app.router.add_routes( - [ - web.get("/v2.7/me", self.on_me), - web.get("/v2.7/me/friends", self.on_my_friends), - ] - ) - self.runner = None - here = pathlib.Path(__file__) - ssl_cert = here.parent / "server.crt" - ssl_key = here.parent / "server.key" - self.ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - self.ssl_context.load_cert_chain(str(ssl_cert), 
str(ssl_key)) - - async def start(self): - port = unused_port() - self.runner = web.AppRunner(self.app) - await self.runner.setup() - site = web.TCPSite(self.runner, "127.0.0.1", port, ssl_context=self.ssl_context) - await site.start() - return {"graph.facebook.com": port} - - async def stop(self): - await self.runner.cleanup() - - async def on_me(self, request): - return web.json_response({"name": "John Doe", "id": "12345678901234567"}) - - async def on_my_friends(self, request): - return web.json_response( - { - "data": [ - {"name": "Bill Doe", "id": "233242342342"}, - {"name": "Mary Doe", "id": "2342342343222"}, - {"name": "Alex Smith", "id": "234234234344"}, - ], - "paging": { - "cursors": { - "before": "QVFIUjRtc2c5NEl0ajN", - "after": "QVFIUlpFQWM0TmVuaDRad0dt", - }, - "next": ( - "https://graph.facebook.com/v2.7/12345678901234567/" - "friends?access_token=EAACEdEose0cB" - ), - }, - "summary": {"total_count": 3}, - } - ) - - -async def main(loop): - token = "ER34gsSGGS34XCBKd7u" - - fake_facebook = FakeFacebook(loop=loop) - info = await fake_facebook.start() - resolver = FakeResolver(info, loop=loop) - connector = aiohttp.TCPConnector(loop=loop, resolver=resolver, verify_ssl=False) - - async with aiohttp.ClientSession(connector=connector, loop=loop) as session: - async with session.get( - "https://graph.facebook.com/v2.7/me", params={"access_token": token} - ) as resp: - print(await resp.json()) - - async with session.get( - "https://graph.facebook.com/v2.7/me/friends", params={"access_token": token} - ) as resp: - print(await resp.json()) - - await fake_facebook.stop() - - -loop = asyncio.get_event_loop() -loop.run_until_complete(main(loop)) diff --git a/third_party/python/aiohttp/examples/legacy/crawl.py b/third_party/python/aiohttp/examples/legacy/crawl.py deleted file mode 100755 index c8029b48545a..000000000000 --- a/third_party/python/aiohttp/examples/legacy/crawl.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python3 - -import asyncio -import logging -import re -import signal -import sys -import urllib.parse - -import aiohttp - - -class Crawler: - def __init__(self, rooturl, loop, maxtasks=100): - self.rooturl = rooturl - self.loop = loop - self.todo = set() - self.busy = set() - self.done = {} - self.tasks = set() - self.sem = asyncio.Semaphore(maxtasks, loop=loop) - - # connector stores cookies between requests and uses connection pool - self.session = aiohttp.ClientSession(loop=loop) - - async def run(self): - t = asyncio.ensure_future(self.addurls([(self.rooturl, "")]), loop=self.loop) - await asyncio.sleep(1, loop=self.loop) - while self.busy: - await asyncio.sleep(1, loop=self.loop) - - await t - await self.session.close() - self.loop.stop() - - async def addurls(self, urls): - for url, parenturl in urls: - url = urllib.parse.urljoin(parenturl, url) - url, frag = urllib.parse.urldefrag(url) - if ( - url.startswith(self.rooturl) - and url not in self.busy - and url not in self.done - and url not in self.todo - ): - self.todo.add(url) - await self.sem.acquire() - task = asyncio.ensure_future(self.process(url), loop=self.loop) - task.add_done_callback(lambda t: self.sem.release()) - task.add_done_callback(self.tasks.remove) - self.tasks.add(task) - - async def process(self, url): - print("processing:", url) - - self.todo.remove(url) - self.busy.add(url) - try: - resp = await self.session.get(url) - except Exception as exc: - print("...", url, "has error", repr(str(exc))) - self.done[url] = False - else: - if resp.status == 200 and ("text/html" in 
resp.headers.get("content-type")): - data = (await resp.read()).decode("utf-8", "replace") - urls = re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', data) - asyncio.Task(self.addurls([(u, url) for u in urls])) - - resp.close() - self.done[url] = True - - self.busy.remove(url) - print( - len(self.done), - "completed tasks,", - len(self.tasks), - "still pending, todo", - len(self.todo), - ) - - -def main(): - loop = asyncio.get_event_loop() - - c = Crawler(sys.argv[1], loop) - asyncio.ensure_future(c.run(), loop=loop) - - try: - loop.add_signal_handler(signal.SIGINT, loop.stop) - except RuntimeError: - pass - loop.run_forever() - print("todo:", len(c.todo)) - print("busy:", len(c.busy)) - print("done:", len(c.done), "; ok:", sum(c.done.values())) - print("tasks:", len(c.tasks)) - - -if __name__ == "__main__": - if "--iocp" in sys.argv: - from asyncio import events, windows_events - - sys.argv.remove("--iocp") - logging.info("using iocp") - el = windows_events.ProactorEventLoop() - events.set_event_loop(el) - - main() diff --git a/third_party/python/aiohttp/examples/legacy/srv.py b/third_party/python/aiohttp/examples/legacy/srv.py deleted file mode 100755 index 628b6f332f1e..000000000000 --- a/third_party/python/aiohttp/examples/legacy/srv.py +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env python3 -"""Simple server written using an event loop.""" - -import argparse -import asyncio -import logging -import os -import sys - -import aiohttp -import aiohttp.server - -try: - import ssl -except ImportError: # pragma: no cover - ssl = None - - -class HttpRequestHandler(aiohttp.server.ServerHttpProtocol): - async def handle_request(self, message, payload): - print( - "method = {!r}; path = {!r}; version = {!r}".format( - message.method, message.path, message.version - ) - ) - - path = message.path - - if not (path.isprintable() and path.startswith("/")) or "/." in path: - print("bad path", repr(path)) - path = None - else: - path = "." + path - if not os.path.exists(path): - print("no file", repr(path)) - path = None - else: - isdir = os.path.isdir(path) - - if not path: - raise aiohttp.HttpProcessingError(code=404) - - for hdr, val in message.headers.items(): - print(hdr, val) - - if isdir and not path.endswith("/"): - path = path + "/" - raise aiohttp.HttpProcessingError( - code=302, headers=(("URI", path), ("Location", path)) - ) - - response = aiohttp.Response(self.writer, 200, http_version=message.version) - response.add_header("Transfer-Encoding", "chunked") - - # content encoding - accept_encoding = message.headers.get("accept-encoding", "").lower() - if "deflate" in accept_encoding: - response.add_header("Content-Encoding", "deflate") - response.add_compression_filter("deflate") - elif "gzip" in accept_encoding: - response.add_header("Content-Encoding", "gzip") - response.add_compression_filter("gzip") - - response.add_chunking_filter(1025) - - if isdir: - response.add_header("Content-type", "text/html") - response.send_headers() - - response.write(b"
<ul>\r\n") - for name in sorted(os.listdir(path)): - if name.isprintable() and not name.startswith("."): - try: - bname = name.encode("ascii") - except UnicodeError: - pass - else: - if os.path.isdir(os.path.join(path, name)): - response.write( - b'<li><a href="' - + bname - + b'/">' - + bname - + b"/</a></li>\r\n" - ) - else: - response.write( - b'<li><a href="' - + bname - + b'">' - + bname - + b"</a></li>\r\n" - ) - response.write(b"</ul>
    ") - else: - response.add_header("Content-type", "text/plain") - response.send_headers() - - try: - with open(path, "rb") as fp: - chunk = fp.read(8192) - while chunk: - response.write(chunk) - chunk = fp.read(8192) - except OSError: - response.write(b"Cannot open") - - await response.write_eof() - if response.keep_alive(): - self.keep_alive(True) - - -ARGS = argparse.ArgumentParser(description="Run simple HTTP server.") -ARGS.add_argument( - "--host", action="store", dest="host", default="127.0.0.1", help="Host name" -) -ARGS.add_argument( - "--port", action="store", dest="port", default=8080, type=int, help="Port number" -) -# make iocp and ssl mutually exclusive because ProactorEventLoop is -# incompatible with SSL -group = ARGS.add_mutually_exclusive_group() -group.add_argument( - "--iocp", action="store_true", dest="iocp", help="Windows IOCP event loop" -) -group.add_argument("--ssl", action="store_true", dest="ssl", help="Run ssl mode.") -ARGS.add_argument("--sslcert", action="store", dest="certfile", help="SSL cert file.") -ARGS.add_argument("--sslkey", action="store", dest="keyfile", help="SSL key file.") - - -def main(): - args = ARGS.parse_args() - - if ":" in args.host: - args.host, port = args.host.split(":", 1) - args.port = int(port) - - if args.iocp: - from asyncio import windows_events - - sys.argv.remove("--iocp") - logging.info("using iocp") - el = windows_events.ProactorEventLoop() - asyncio.set_event_loop(el) - - if args.ssl: - here = os.path.join(os.path.dirname(__file__), "tests") - - if args.certfile: - certfile = args.certfile or os.path.join(here, "sample.crt") - keyfile = args.keyfile or os.path.join(here, "sample.key") - else: - certfile = os.path.join(here, "sample.crt") - keyfile = os.path.join(here, "sample.key") - - sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - sslcontext.load_cert_chain(certfile, keyfile) - else: - sslcontext = None - - loop = asyncio.get_event_loop() - f = loop.create_server( - lambda: HttpRequestHandler(debug=True, keep_alive=75), - args.host, - args.port, - ssl=sslcontext, - ) - svr = loop.run_until_complete(f) - socks = svr.sockets - print("serving on", socks[0].getsockname()) - try: - loop.run_forever() - except KeyboardInterrupt: - pass - - -if __name__ == "__main__": - main() diff --git a/third_party/python/aiohttp/examples/legacy/tcp_protocol_parser.py b/third_party/python/aiohttp/examples/legacy/tcp_protocol_parser.py deleted file mode 100755 index ca49db7d8f9e..000000000000 --- a/third_party/python/aiohttp/examples/legacy/tcp_protocol_parser.py +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env python3 -"""Protocol parser example.""" -import argparse -import asyncio -import collections - -import aiohttp - -try: - import signal -except ImportError: - signal = None - - -MSG_TEXT = b"text:" -MSG_PING = b"ping:" -MSG_PONG = b"pong:" -MSG_STOP = b"stop:" - -Message = collections.namedtuple("Message", ("tp", "data")) - - -def my_protocol_parser(out, buf): - """Parser is used with StreamParser for incremental protocol parsing. - Parser is a generator function, but it is not a coroutine. Usually - parsers are implemented as a state machine. 
- - more details in asyncio/parsers.py - existing parsers: - * HTTP protocol parsers asyncio/http/protocol.py - * websocket parser asyncio/http/websocket.py - """ - while True: - tp = yield from buf.read(5) - if tp in (MSG_PING, MSG_PONG): - # skip line - yield from buf.skipuntil(b"\r\n") - out.feed_data(Message(tp, None)) - elif tp == MSG_STOP: - out.feed_data(Message(tp, None)) - elif tp == MSG_TEXT: - # read text - text = yield from buf.readuntil(b"\r\n") - out.feed_data(Message(tp, text.strip().decode("utf-8"))) - else: - raise ValueError("Unknown protocol prefix.") - - -class MyProtocolWriter: - def __init__(self, transport): - self.transport = transport - - def ping(self): - self.transport.write(b"ping:\r\n") - - def pong(self): - self.transport.write(b"pong:\r\n") - - def stop(self): - self.transport.write(b"stop:\r\n") - - def send_text(self, text): - self.transport.write(f"text:{text.strip()}\r\n".encode("utf-8")) - - -class EchoServer(asyncio.Protocol): - def connection_made(self, transport): - print("Connection made") - self.transport = transport - self.stream = aiohttp.StreamParser() - asyncio.Task(self.dispatch()) - - def data_received(self, data): - self.stream.feed_data(data) - - def eof_received(self): - self.stream.feed_eof() - - def connection_lost(self, exc): - print("Connection lost") - - async def dispatch(self): - reader = self.stream.set_parser(my_protocol_parser) - writer = MyProtocolWriter(self.transport) - - while True: - try: - msg = await reader.read() - except aiohttp.ConnectionError: - # client has been disconnected - break - - print(f"Message received: {msg}") - - if msg.type == MSG_PING: - writer.pong() - elif msg.type == MSG_TEXT: - writer.send_text("Re: " + msg.data) - elif msg.type == MSG_STOP: - self.transport.close() - break - - -async def start_client(loop, host, port): - transport, stream = await loop.create_connection(aiohttp.StreamProtocol, host, port) - reader = stream.reader.set_parser(my_protocol_parser) - writer = MyProtocolWriter(transport) - writer.ping() - - message = "This is the message. It will be echoed." 
- - while True: - try: - msg = await reader.read() - except aiohttp.ConnectionError: - print("Server has been disconnected.") - break - - print(f"Message received: {msg}") - if msg.type == MSG_PONG: - writer.send_text(message) - print("data sent:", message) - elif msg.type == MSG_TEXT: - writer.stop() - print("stop sent") - break - - transport.close() - - -def start_server(loop, host, port): - f = loop.create_server(EchoServer, host, port) - srv = loop.run_until_complete(f) - x = srv.sockets[0] - print("serving on", x.getsockname()) - loop.run_forever() - - -ARGS = argparse.ArgumentParser(description="Protocol parser example.") -ARGS.add_argument( - "--server", action="store_true", dest="server", default=False, help="Run tcp server" -) -ARGS.add_argument( - "--client", action="store_true", dest="client", default=False, help="Run tcp client" -) -ARGS.add_argument( - "--host", action="store", dest="host", default="127.0.0.1", help="Host name" -) -ARGS.add_argument( - "--port", action="store", dest="port", default=9999, type=int, help="Port number" -) - - -if __name__ == "__main__": - args = ARGS.parse_args() - - if ":" in args.host: - args.host, port = args.host.split(":", 1) - args.port = int(port) - - if (not (args.server or args.client)) or (args.server and args.client): - print("Please specify --server or --client\n") - ARGS.print_help() - else: - loop = asyncio.get_event_loop() - if signal is not None: - loop.add_signal_handler(signal.SIGINT, loop.stop) - - if args.server: - start_server(loop, args.host, args.port) - else: - loop.run_until_complete(start_client(loop, args.host, args.port)) diff --git a/third_party/python/aiohttp/examples/lowlevel_srv.py b/third_party/python/aiohttp/examples/lowlevel_srv.py deleted file mode 100644 index 5a003f40f42b..000000000000 --- a/third_party/python/aiohttp/examples/lowlevel_srv.py +++ /dev/null @@ -1,26 +0,0 @@ -import asyncio - -from aiohttp import web - - -async def handler(request): - return web.Response(text="OK") - - -async def main(loop): - server = web.Server(handler) - await loop.create_server(server, "127.0.0.1", 8080) - print("======= Serving on http://127.0.0.1:8080/ ======") - - # pause here for very long time by serving HTTP requests and - # waiting for keyboard interruption - await asyncio.sleep(100 * 3600) - - -loop = asyncio.get_event_loop() - -try: - loop.run_until_complete(main(loop)) -except KeyboardInterrupt: - pass -loop.close() diff --git a/third_party/python/aiohttp/examples/server.crt b/third_party/python/aiohttp/examples/server.crt deleted file mode 100644 index 708971a376cb..000000000000 --- a/third_party/python/aiohttp/examples/server.crt +++ /dev/null @@ -1,19 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDADCCAegCCQCgevpPMuTTLzANBgkqhkiG9w0BAQsFADBCMQswCQYDVQQGEwJV -QTEQMA4GA1UECAwHVWtyYWluZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQ -dHkgTHRkMB4XDTE2MDgwNzIzMTMwOFoXDTI2MDgwNTIzMTMwOFowQjELMAkGA1UE -BhMCVUExEDAOBgNVBAgMB1VrcmFpbmUxITAfBgNVBAoMGEludGVybmV0IFdpZGdp -dHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOUgkn3j -X/sdg6GGueGDHCM+snIUVY3fM6D4jXjyBhnT3TqKG1lJwCGYR11AD+2SJYppU+w4 -QaF6YZwMeZBKy+mVQ9+CrVYyKQE7j9H8XgNEHV9BQzoragT8lia8eC5aOQzUeX8A -xCSSbsnyT/X+S1IKdd0txLOeZOD6pWwJoc3dpDELglk2b1tzhyN2GjQv3aRHj55P -x7127MeZyRXwODFpXrpbnwih4OqkA4EYtmqFbZttGEzMhd4Y5mkbyuRbGM+IE99o -QJMvnIkjAfUo0aKnDrcAIkWCkwLIci9TIG6u3R1P2Tn+HYVntzQZ4BnxanbFNQ5S -9ARd3529EmO3BzUCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAXyiw1+YUnTEDI3C/ -vq1Vn9pnwZALVQPiPlTqEGkl/nbq0suMmeZZG7pwrOJp3wr+sGwRAv9sPTro6srf 
-Vj12wTo4LrTRKEDuS+AUJl0Mut7cPGIUKo+MGeZmmnDjMqcjljN3AO47ef4eWYo5 -XGe4r4NDABEk5auOD/vQW5IiIMdmWsaMJ+0mZNpAV2NhAD/6ia28VvSL/yuaNqDW -TYTUYHWLH08H6M6qrQ7FdoIDyYR5siqBukQzeqlnuq45bQ3ViYttNIkzZN4jbWJV -/MFYLuJQ/fNoalDIC+ec0EIa9NbrfpoocJ8h6HlmWOqkES4QpBSOrkVid64Cdy3P -JgiEWg== ------END CERTIFICATE----- diff --git a/third_party/python/aiohttp/examples/server.csr b/third_party/python/aiohttp/examples/server.csr deleted file mode 100644 index 1df3087b91f9..000000000000 --- a/third_party/python/aiohttp/examples/server.csr +++ /dev/null @@ -1,16 +0,0 @@ ------BEGIN CERTIFICATE REQUEST----- -MIIChzCCAW8CAQAwQjELMAkGA1UEBhMCVUExEDAOBgNVBAgMB1VrcmFpbmUxITAf -BgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAOUgkn3jX/sdg6GGueGDHCM+snIUVY3fM6D4jXjyBhnT -3TqKG1lJwCGYR11AD+2SJYppU+w4QaF6YZwMeZBKy+mVQ9+CrVYyKQE7j9H8XgNE -HV9BQzoragT8lia8eC5aOQzUeX8AxCSSbsnyT/X+S1IKdd0txLOeZOD6pWwJoc3d -pDELglk2b1tzhyN2GjQv3aRHj55Px7127MeZyRXwODFpXrpbnwih4OqkA4EYtmqF -bZttGEzMhd4Y5mkbyuRbGM+IE99oQJMvnIkjAfUo0aKnDrcAIkWCkwLIci9TIG6u -3R1P2Tn+HYVntzQZ4BnxanbFNQ5S9ARd3529EmO3BzUCAwEAAaAAMA0GCSqGSIb3 -DQEBCwUAA4IBAQDO/PSd29KgisTdGXhntg7yBEhBAjsDW7uQCrdrPSZtFyN6wUHy -/1yrrWe56ZuW8jpuP5tG0eTZ+0bT2RXIRot8a2Cc3eBhpoe8M3d84yXjKAoHutGE -5IK+TViQdvT3pT3a7pTmjlf8Ojq9tx+U2ckiz8Ccnjd9yM47M9NgMhrS1aBpVZSt -gOD+zzrqMML4xks9id94H7bi9Tgs3AbEJIyDpBpoK6i4OvK7KTidCngCg80qmdTy -bcScLapoy1Ped2BKKuxWdOOlP+mDJatc/pcfBLE13AncQjJgMerS9M5RWCBjmRow -A+aB6fBEU8bOTrqCryfBeTiV6xzyDDcIXtc6 ------END CERTIFICATE REQUEST----- diff --git a/third_party/python/aiohttp/examples/server.key b/third_party/python/aiohttp/examples/server.key deleted file mode 100644 index 37dae99e6737..000000000000 --- a/third_party/python/aiohttp/examples/server.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEA5SCSfeNf+x2DoYa54YMcIz6ychRVjd8zoPiNePIGGdPdOoob -WUnAIZhHXUAP7ZIlimlT7DhBoXphnAx5kErL6ZVD34KtVjIpATuP0fxeA0QdX0FD -OitqBPyWJrx4Llo5DNR5fwDEJJJuyfJP9f5LUgp13S3Es55k4PqlbAmhzd2kMQuC -WTZvW3OHI3YaNC/dpEePnk/HvXbsx5nJFfA4MWleulufCKHg6qQDgRi2aoVtm20Y -TMyF3hjmaRvK5FsYz4gT32hAky+ciSMB9SjRoqcOtwAiRYKTAshyL1Mgbq7dHU/Z -Of4dhWe3NBngGfFqdsU1DlL0BF3fnb0SY7cHNQIDAQABAoIBAG9BJ6B03VADfrzZ -vDwh+3Gpqd/2u6wNqvYIejk123yDATLBiJIMW3x0goJm7tT+V7gjeJqEnmmYEPlC -nWxQxT6AOdq3iw8FgB+XGjhuAAA5/MEZ4VjHZ81QEGBytzBaosT2DqB6cMMJTz5D -qEvb1Brb9WsWJCLLUFRloBkbfDOG9lMvt34ixYTTmqjsVj5WByD5BhzKH51OJ72L -00IYpvrsEOtSev1hNV4199CHPYE90T/YQVooRBiHtTcfN+/KNVJu6Rf/zcaJ3WMS -1l3MBI8HwMimjKKkbddpoMHyFMtSNmS9Yq+4a9w7XZo1F5rt88hYSCtAF8HRAarX -0VBCJmkCgYEA9HenBBnmfDoN857femzoTHdWQQrZQ4YPAKHvKPlcgudizE5tQbs0 -iTpwm+IsecgJS2Rio7zY+P7A5nKFz3N5c0IX3smYo0J2PoakkLAm25KMxFZYBuz4 -MFWVdfByAU7d28BdNfyOVbA2kU2eal9lJ0yPLpMLbH8+bbvw5uBS808CgYEA7++p -ftwib3DvKWMpl6G5eA1C2xprdbE0jm2fSr3LYp/vZ4QN2V6kK2YIlyUqQvhYCnxX -oIP3v2MWDRHKKwJtBWR4+t23PaDaSXS2Ifm0qhRxwSm/oqpAJQXbR7VzxXp4/4FP -1SgkLe51bubc4h+cDngqBLcplCanvj52CqhqzDsCgYAEIhG8zANNjl22BLWaiETV -Jh9bMifCMH4IcLRuaOjbfbX55kmKlvOobkiBGi3OUUd28teIFSVF8GiqfL0uaLFg -9XkZ1yaxe+or3HLjz1aY171xhFQwqcj4aDoCqHIE+6Rclr/8raxqXnRNuJY5DivT -okO5cdr7lpsjl83W2WwNmQKBgCPXi1xWChbXqgJmu8nY8NnMMVaFpdPY+t7j5U3G -+GDtP1gZU/BKwP9yqInblWqXqp82X+isjg/a/2pIZAj0vdB2Z9Qh1sOwCau7cZG1 -uZVGpI+UavojsJ1XOKCHrJmtZ/HTIVfYPT9XRdehSRHGYwuOS8iUi/ODqr8ymXOS -IRINAoGBAMEmhTihgFz6Y8ezRK3QTubguehHZG1zIvtgVhOk+8hRUTSJPI9nBJPC -4gOZsPx4g2oLK6PiudPR79bhxRxPACCMnXkdwZ/8FaIdmvRHsWVs8T80wID0wthI -r5hW4uqi9CcKZrGWH7mx9cVJktspeGUczvKyzNMfCaojwzA/49Z1 ------END RSA PRIVATE KEY----- diff --git a/third_party/python/aiohttp/examples/server_simple.py 
b/third_party/python/aiohttp/examples/server_simple.py deleted file mode 100644 index d464383d269a..000000000000 --- a/third_party/python/aiohttp/examples/server_simple.py +++ /dev/null @@ -1,31 +0,0 @@ -# server_simple.py -from aiohttp import web - - -async def handle(request): - name = request.match_info.get("name", "Anonymous") - text = "Hello, " + name - return web.Response(text=text) - - -async def wshandle(request): - ws = web.WebSocketResponse() - await ws.prepare(request) - - async for msg in ws: - if msg.type == web.WSMsgType.text: - await ws.send_str(f"Hello, {msg.data}") - elif msg.type == web.WSMsgType.binary: - await ws.send_bytes(msg.data) - elif msg.type == web.WSMsgType.close: - break - - return ws - - -app = web.Application() -app.add_routes( - [web.get("/", handle), web.get("/echo", wshandle), web.get("/{name}", handle)] -) - -web.run_app(app) diff --git a/third_party/python/aiohttp/examples/static_files.py b/third_party/python/aiohttp/examples/static_files.py deleted file mode 100755 index 65f6bb9c7646..000000000000 --- a/third_party/python/aiohttp/examples/static_files.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env python3 -import pathlib - -from aiohttp import web - -app = web.Application() -app.router.add_static("/", pathlib.Path(__file__).parent, show_index=True) - -web.run_app(app) diff --git a/third_party/python/aiohttp/examples/web_classview.py b/third_party/python/aiohttp/examples/web_classview.py deleted file mode 100755 index 0f65f7d7f432..000000000000 --- a/third_party/python/aiohttp/examples/web_classview.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python3 -"""Example for aiohttp.web class based views -""" - - -import functools -import json - -from aiohttp import web - - -class MyView(web.View): - async def get(self): - return web.json_response( - { - "method": "get", - "args": dict(self.request.GET), - "headers": dict(self.request.headers), - }, - dumps=functools.partial(json.dumps, indent=4), - ) - - async def post(self): - data = await self.request.post() - return web.json_response( - { - "method": "post", - "args": dict(self.request.GET), - "data": dict(data), - "headers": dict(self.request.headers), - }, - dumps=functools.partial(json.dumps, indent=4), - ) - - -async def index(request): - txt = """ - - - Class based view example - - -

-          <h1>Class based view example</h1>
-          <ul>
-            <li><a href="/">/</a> This page
-            <li><a href="/get">/get</a> Returns GET data.
-            <li><a href="/post">/post</a> Returns POST data.
    - - - """ - return web.Response(text=txt, content_type="text/html") - - -def init(): - app = web.Application() - app.router.add_get("/", index) - app.router.add_get("/get", MyView) - app.router.add_post("/post", MyView) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/web_cookies.py b/third_party/python/aiohttp/examples/web_cookies.py deleted file mode 100755 index e7a4a595d779..000000000000 --- a/third_party/python/aiohttp/examples/web_cookies.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 -"""Example for aiohttp.web basic server with cookies. -""" - -from pprint import pformat - -from aiohttp import web - -tmpl = """\ - - - Login
-        <a href="/logout">Logout</a><br/>
-        <pre>{}</pre>
    - -""" - - -async def root(request): - resp = web.Response(content_type="text/html") - resp.text = tmpl.format(pformat(request.cookies)) - return resp - - -async def login(request): - resp = web.HTTPFound(location="/") - resp.set_cookie("AUTH", "secret") - return resp - - -async def logout(request): - resp = web.HTTPFound(location="/") - resp.del_cookie("AUTH") - return resp - - -def init(loop): - app = web.Application(loop=loop) - app.router.add_get("/", root) - app.router.add_get("/login", login) - app.router.add_get("/logout", logout) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/web_rewrite_headers_middleware.py b/third_party/python/aiohttp/examples/web_rewrite_headers_middleware.py deleted file mode 100755 index 20799a3a7c2f..000000000000 --- a/third_party/python/aiohttp/examples/web_rewrite_headers_middleware.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python3 -""" -Example for rewriting response headers by middleware. -""" - -from aiohttp import web - - -async def handler(request): - return web.Response(text="Everything is fine") - - -@web.middleware -async def middleware(request, handler): - try: - response = await handler(request) - except web.HTTPException as exc: - raise exc - if not response.prepared: - response.headers["SERVER"] = "Secured Server Software" - return response - - -def init(): - app = web.Application(middlewares=[middleware]) - app.router.add_get("/", handler) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/web_srv.py b/third_party/python/aiohttp/examples/web_srv.py deleted file mode 100755 index b572326a3a21..000000000000 --- a/third_party/python/aiohttp/examples/web_srv.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python3 -"""Example for aiohttp.web basic server -""" - -import textwrap - -from aiohttp import web - - -async def intro(request): - txt = textwrap.dedent( - """\ - Type {url}/hello/John {url}/simple or {url}/change_body - in browser url bar - """ - ).format(url="127.0.0.1:8080") - binary = txt.encode("utf8") - resp = web.StreamResponse() - resp.content_length = len(binary) - resp.content_type = "text/plain" - await resp.prepare(request) - await resp.write(binary) - return resp - - -async def simple(request): - return web.Response(text="Simple answer") - - -async def change_body(request): - resp = web.Response() - resp.body = b"Body changed" - resp.content_type = "text/plain" - return resp - - -async def hello(request): - resp = web.StreamResponse() - name = request.match_info.get("name", "Anonymous") - answer = ("Hello, " + name).encode("utf8") - resp.content_length = len(answer) - resp.content_type = "text/plain" - await resp.prepare(request) - await resp.write(answer) - await resp.write_eof() - return resp - - -def init(): - app = web.Application() - app.router.add_get("/", intro) - app.router.add_get("/simple", simple) - app.router.add_get("/change_body", change_body) - app.router.add_get("/hello/{name}", hello) - app.router.add_get("/hello", hello) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/web_srv_route_deco.py b/third_party/python/aiohttp/examples/web_srv_route_deco.py deleted file mode 100644 index 332990362ccf..000000000000 --- a/third_party/python/aiohttp/examples/web_srv_route_deco.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -"""Example for aiohttp.web basic server -with decorator definition for routes -""" - -import textwrap - -from aiohttp import web - -routes = web.RouteTableDef() - - 
-@routes.get("/") -async def intro(request): - txt = textwrap.dedent( - """\ - Type {url}/hello/John {url}/simple or {url}/change_body - in browser url bar - """ - ).format(url="127.0.0.1:8080") - binary = txt.encode("utf8") - resp = web.StreamResponse() - resp.content_length = len(binary) - resp.content_type = "text/plain" - await resp.prepare(request) - await resp.write(binary) - return resp - - -@routes.get("/simple") -async def simple(request): - return web.Response(text="Simple answer") - - -@routes.get("/change_body") -async def change_body(request): - resp = web.Response() - resp.body = b"Body changed" - resp.content_type = "text/plain" - return resp - - -@routes.get("/hello") -async def hello(request): - resp = web.StreamResponse() - name = request.match_info.get("name", "Anonymous") - answer = ("Hello, " + name).encode("utf8") - resp.content_length = len(answer) - resp.content_type = "text/plain" - await resp.prepare(request) - await resp.write(answer) - await resp.write_eof() - return resp - - -def init(): - app = web.Application() - app.router.add_routes(routes) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/web_srv_route_table.py b/third_party/python/aiohttp/examples/web_srv_route_table.py deleted file mode 100644 index f53142adad41..000000000000 --- a/third_party/python/aiohttp/examples/web_srv_route_table.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python3 -"""Example for aiohttp.web basic server -with table definition for routes -""" - -import textwrap - -from aiohttp import web - - -async def intro(request): - txt = textwrap.dedent( - """\ - Type {url}/hello/John {url}/simple or {url}/change_body - in browser url bar - """ - ).format(url="127.0.0.1:8080") - binary = txt.encode("utf8") - resp = web.StreamResponse() - resp.content_length = len(binary) - resp.content_type = "text/plain" - await resp.prepare(request) - await resp.write(binary) - return resp - - -async def simple(request): - return web.Response(text="Simple answer") - - -async def change_body(request): - resp = web.Response() - resp.body = b"Body changed" - resp.content_type = "text/plain" - return resp - - -async def hello(request): - resp = web.StreamResponse() - name = request.match_info.get("name", "Anonymous") - answer = ("Hello, " + name).encode("utf8") - resp.content_length = len(answer) - resp.content_type = "text/plain" - await resp.prepare(request) - await resp.write(answer) - await resp.write_eof() - return resp - - -def init(): - app = web.Application() - app.router.add_routes( - [ - web.get("/", intro), - web.get("/simple", simple), - web.get("/change_body", change_body), - web.get("/hello/{name}", hello), - web.get("/hello", hello), - ] - ) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/web_ws.py b/third_party/python/aiohttp/examples/web_ws.py deleted file mode 100755 index 970f1506be37..000000000000 --- a/third_party/python/aiohttp/examples/web_ws.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python3 -"""Example for aiohttp.web websocket server -""" - -import os - -from aiohttp import web - -WS_FILE = os.path.join(os.path.dirname(__file__), "websocket.html") - - -async def wshandler(request): - resp = web.WebSocketResponse() - available = resp.can_prepare(request) - if not available: - with open(WS_FILE, "rb") as fp: - return web.Response(body=fp.read(), content_type="text/html") - - await resp.prepare(request) - - await resp.send_str("Welcome!!!") - - try: - print("Someone joined.") - for ws in 
request.app["sockets"]: - await ws.send_str("Someone joined") - request.app["sockets"].append(resp) - - async for msg in resp: - if msg.type == web.WSMsgType.TEXT: - for ws in request.app["sockets"]: - if ws is not resp: - await ws.send_str(msg.data) - else: - return resp - return resp - - finally: - request.app["sockets"].remove(resp) - print("Someone disconnected.") - for ws in request.app["sockets"]: - await ws.send_str("Someone disconnected.") - - -async def on_shutdown(app): - for ws in app["sockets"]: - await ws.close() - - -def init(): - app = web.Application() - app["sockets"] = [] - app.router.add_get("/", wshandler) - app.on_shutdown.append(on_shutdown) - return app - - -web.run_app(init()) diff --git a/third_party/python/aiohttp/examples/websocket.html b/third_party/python/aiohttp/examples/websocket.html deleted file mode 100644 index 2ba9ff367d69..000000000000 --- a/third_party/python/aiohttp/examples/websocket.html +++ /dev/null @@ -1,89 +0,0 @@ - - - - - - - - -

["Chat!" heading and " | Status: disconnected" indicator; the rest of the page's markup and client-side script did not survive this rendering]
    - - diff --git a/third_party/python/aiohttp/pyproject.toml b/third_party/python/aiohttp/pyproject.toml deleted file mode 100644 index e666dfc174ed..000000000000 --- a/third_party/python/aiohttp/pyproject.toml +++ /dev/null @@ -1,7 +0,0 @@ -[tool.towncrier] -package = "aiohttp" -filename = "CHANGES.rst" -directory = "CHANGES/" -title_format = "{version} ({project_date})" -template = "CHANGES/.TEMPLATE.rst" -issue_format = "`#{issue} `_" diff --git a/third_party/python/aiohttp/setup.cfg b/third_party/python/aiohttp/setup.cfg deleted file mode 100644 index 2f528bc49fd8..000000000000 --- a/third_party/python/aiohttp/setup.cfg +++ /dev/null @@ -1,93 +0,0 @@ -[aliases] -test = pytest - -[metadata] -license_file = LICENSE.txt - -[pep8] -max-line-length = 79 - -[easy_install] -zip_ok = false - -[flake8] -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E704,W503,W504,F811 -max-line-length = 88 - -[isort] -line_length = 88 -include_trailing_comma = True -multi_line_output = 3 -force_grid_wrap = 0 -combine_as_imports = True -known_third_party = jinja2,pytest,multidict,yarl,gunicorn,freezegun,async_generator -known_first_party = aiohttp,aiohttp_jinja2,aiopg - -[report] -exclude_lines = - @abc.abstractmethod - @abstractmethod - -[coverage:run] -branch = True -source = aiohttp, tests -omit = site-packages - -[tool:pytest] -addopts = --cov=aiohttp -v -rxXs --durations 10 -filterwarnings = - error - ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning -junit_suite_name = aiohttp_test_suite -norecursedirs = dist docs build .tox .eggs -minversion = 3.8.2 -testpaths = tests/ -junit_family = xunit2 -xfail_strict = true - -[mypy] -follow_imports = silent -strict_optional = True -warn_redundant_casts = True -warn_unused_ignores = True -check_untyped_defs = True -disallow_any_generics = True -disallow_untyped_defs = True - -[mypy-pytest] -ignore_missing_imports = true - -[mypy-uvloop] -ignore_missing_imports = true - -[mypy-tokio] -ignore_missing_imports = true - -[mypy-async_generator] -ignore_missing_imports = true - -[mypy-aiodns] -ignore_missing_imports = true - -[mypy-gunicorn.config] -ignore_missing_imports = true - -[mypy-gunicorn.workers] -ignore_missing_imports = true - -[mypy-brotli] -ignore_missing_imports = true - -[mypy-chardet] -ignore_missing_imports = true - -[mypy-cchardet] -ignore_missing_imports = true - -[mypy-idna_ssl] -ignore_missing_imports = true - -[egg_info] -tag_build = -tag_date = 0 - diff --git a/third_party/python/aiohttp/setup.py b/third_party/python/aiohttp/setup.py deleted file mode 100644 index 54462ba71c99..000000000000 --- a/third_party/python/aiohttp/setup.py +++ /dev/null @@ -1,159 +0,0 @@ -import pathlib -import re -import sys -from distutils.command.build_ext import build_ext -from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError - -from setuptools import Extension, setup - -if sys.version_info < (3, 6): - raise RuntimeError("aiohttp 3.7+ requires Python 3.6+") - -here = pathlib.Path(__file__).parent - - -if (here / ".git").exists() and not (here / "vendor/http-parser/README.md").exists(): - print("Install submodules when building from git clone", file=sys.stderr) - print("Hint:", file=sys.stderr) - print(" git submodule update --init", file=sys.stderr) - sys.exit(2) - - -# NOTE: makefile cythonizes all Cython modules - -extensions = [ - Extension("aiohttp._websocket", 
["aiohttp/_websocket.c"]), - Extension( - "aiohttp._http_parser", - [ - "aiohttp/_http_parser.c", - "vendor/http-parser/http_parser.c", - "aiohttp/_find_header.c", - ], - define_macros=[("HTTP_PARSER_STRICT", 0)], - ), - Extension("aiohttp._frozenlist", ["aiohttp/_frozenlist.c"]), - Extension("aiohttp._helpers", ["aiohttp/_helpers.c"]), - Extension("aiohttp._http_writer", ["aiohttp/_http_writer.c"]), -] - - -class BuildFailed(Exception): - pass - - -class ve_build_ext(build_ext): - # This class allows C extension building to fail. - - def run(self): - try: - build_ext.run(self) - except (DistutilsPlatformError, FileNotFoundError): - raise BuildFailed() - - def build_extension(self, ext): - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsExecError, DistutilsPlatformError, ValueError): - raise BuildFailed() - - -txt = (here / "aiohttp" / "__init__.py").read_text("utf-8") -try: - version = re.findall(r'^__version__ = "([^"]+)"\r?$', txt, re.M)[0] -except IndexError: - raise RuntimeError("Unable to determine version.") - -install_requires = [ - "attrs>=17.3.0", - "chardet>=2.0,<5.0", - "multidict>=4.5,<7.0", - "async_timeout>=3.0,<4.0", - "yarl>=1.0,<2.0", - 'idna-ssl>=1.0; python_version<"3.7"', - "typing_extensions>=3.6.5", -] - - -def read(f): - return (here / f).read_text("utf-8").strip() - - -NEEDS_PYTEST = {"pytest", "test"}.intersection(sys.argv) -pytest_runner = ["pytest-runner"] if NEEDS_PYTEST else [] - -tests_require = [ - "pytest", - "gunicorn", - "pytest-timeout", - "async-generator", - "pytest-xdist", -] - - -args = dict( - name="aiohttp", - version=version, - description="Async http client/server framework (asyncio)", - long_description="\n\n".join((read("README.rst"), read("CHANGES.rst"))), - classifiers=[ - "License :: OSI Approved :: Apache Software License", - "Intended Audience :: Developers", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Development Status :: 5 - Production/Stable", - "Operating System :: POSIX", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Topic :: Internet :: WWW/HTTP", - "Framework :: AsyncIO", - ], - author="Nikolay Kim", - author_email="fafhrd91@gmail.com", - maintainer=", ".join( - ( - "Nikolay Kim ", - "Andrew Svetlov ", - ) - ), - maintainer_email="aio-libs@googlegroups.com", - url="https://github.com/aio-libs/aiohttp", - project_urls={ - "Chat: Gitter": "https://gitter.im/aio-libs/Lobby", - "CI: Azure Pipelines": "https://dev.azure.com/aio-libs/aiohttp/_build", - "Coverage: codecov": "https://codecov.io/github/aio-libs/aiohttp", - "Docs: RTD": "https://docs.aiohttp.org", - "GitHub: issues": "https://github.com/aio-libs/aiohttp/issues", - "GitHub: repo": "https://github.com/aio-libs/aiohttp", - }, - license="Apache 2", - packages=["aiohttp"], - python_requires=">=3.6", - install_requires=install_requires, - extras_require={ - "speedups": [ - "aiodns", - "brotlipy", - "cchardet", - ], - }, - tests_require=tests_require, - setup_requires=pytest_runner, - include_package_data=True, - ext_modules=extensions, - cmdclass=dict(build_ext=ve_build_ext), -) - -try: - setup(**args) -except BuildFailed: - print("************************************************************") - print("Cannot compile C accelerator module, use pure python version") - 
print("************************************************************") - del args["ext_modules"] - del args["cmdclass"] - setup(**args) diff --git a/third_party/python/aiohttp/vendor/http-parser/.gitignore b/third_party/python/aiohttp/vendor/http-parser/.gitignore deleted file mode 100644 index c122e76fb912..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/.gitignore +++ /dev/null @@ -1,30 +0,0 @@ -/out/ -core -tags -*.o -test -test_g -test_fast -bench -url_parser -parsertrace -parsertrace_g -*.mk -*.Makefile -*.so.* -*.exe.* -*.exe -*.a - - -# Visual Studio uglies -*.suo -*.sln -*.vcxproj -*.vcxproj.filters -*.vcxproj.user -*.opensdf -*.ncrunchsolution* -*.sdf -*.vsp -*.psess diff --git a/third_party/python/aiohttp/vendor/http-parser/.mailmap b/third_party/python/aiohttp/vendor/http-parser/.mailmap deleted file mode 100644 index 278d14126372..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/.mailmap +++ /dev/null @@ -1,8 +0,0 @@ -# update AUTHORS with: -# git log --all --reverse --format='%aN <%aE>' | perl -ne 'BEGIN{print "# Authors ordered by first contribution.\n"} print unless $h{$_}; $h{$_} = 1' > AUTHORS -Ryan Dahl -Salman Haq -Simon Zimmermann -Thomas LE ROUX LE ROUX Thomas -Thomas LE ROUX Thomas LE ROUX -Fedor Indutny diff --git a/third_party/python/aiohttp/vendor/http-parser/.travis.yml b/third_party/python/aiohttp/vendor/http-parser/.travis.yml deleted file mode 100644 index 4b038e6e62d6..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/.travis.yml +++ /dev/null @@ -1,13 +0,0 @@ -language: c - -compiler: - - clang - - gcc - -script: - - "make" - -notifications: - email: false - irc: - - "irc.freenode.net#node-ci" diff --git a/third_party/python/aiohttp/vendor/http-parser/AUTHORS b/third_party/python/aiohttp/vendor/http-parser/AUTHORS deleted file mode 100644 index 5323b685caef..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/AUTHORS +++ /dev/null @@ -1,68 +0,0 @@ -# Authors ordered by first contribution. -Ryan Dahl -Jeremy Hinegardner -Sergey Shepelev -Joe Damato -tomika -Phoenix Sol -Cliff Frey -Ewen Cheslack-Postava -Santiago Gala -Tim Becker -Jeff Terrace -Ben Noordhuis -Nathan Rajlich -Mark Nottingham -Aman Gupta -Tim Becker -Sean Cunningham -Peter Griess -Salman Haq -Cliff Frey -Jon Kolb -Fouad Mardini -Paul Querna -Felix Geisendörfer -koichik -Andre Caron -Ivo Raisr -James McLaughlin -David Gwynne -Thomas LE ROUX -Randy Rizun -Andre Louis Caron -Simon Zimmermann -Erik Dubbelboer -Martell Malone -Bertrand Paquet -BogDan Vatra -Peter Faiman -Corey Richardson -Tóth Tamás -Cam Swords -Chris Dickinson -Uli Köhler -Charlie Somerville -Patrik Stutz -Fedor Indutny -runner -Alexis Campailla -David Wragg -Vinnie Falco -Alex Butum -Rex Feng -Alex Kocharin -Mark Koopman -Helge Heß -Alexis La Goutte -George Miroshnykov -Maciej Małecki -Marc O'Morain -Jeff Pinner -Timothy J Fontaine -Akagi201 -Romain Giraud -Jay Satiro -Arne Steen -Kjell Schubert -Olivier Mengué diff --git a/third_party/python/aiohttp/vendor/http-parser/LICENSE-MIT b/third_party/python/aiohttp/vendor/http-parser/LICENSE-MIT deleted file mode 100644 index 1ec0ab4e1746..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/LICENSE-MIT +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/third_party/python/aiohttp/vendor/http-parser/Makefile b/third_party/python/aiohttp/vendor/http-parser/Makefile deleted file mode 100644 index 5d212215041c..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/Makefile +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright Joyent, Inc. and other Node contributors. All rights reserved. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. - -PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"') -HELPER ?= -BINEXT ?= -SOLIBNAME = libhttp_parser -SOMAJOR = 2 -SOMINOR = 9 -SOREV = 4 -ifeq (darwin,$(PLATFORM)) -SOEXT ?= dylib -SONAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOEXT) -LIBNAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOREV).$(SOEXT) -else ifeq (wine,$(PLATFORM)) -CC = winegcc -BINEXT = .exe.so -HELPER = wine -else -SOEXT ?= so -SONAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR) -LIBNAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR).$(SOREV) -endif - -CC?=gcc -AR?=ar - -CPPFLAGS ?= -LDFLAGS ?= - -CPPFLAGS += -I. 
-CPPFLAGS_DEBUG = $(CPPFLAGS) -DHTTP_PARSER_STRICT=1 -CPPFLAGS_DEBUG += $(CPPFLAGS_DEBUG_EXTRA) -CPPFLAGS_FAST = $(CPPFLAGS) -DHTTP_PARSER_STRICT=0 -CPPFLAGS_FAST += $(CPPFLAGS_FAST_EXTRA) -CPPFLAGS_BENCH = $(CPPFLAGS_FAST) - -CFLAGS += -Wall -Wextra -Werror -CFLAGS_DEBUG = $(CFLAGS) -O0 -g $(CFLAGS_DEBUG_EXTRA) -CFLAGS_FAST = $(CFLAGS) -O3 $(CFLAGS_FAST_EXTRA) -CFLAGS_BENCH = $(CFLAGS_FAST) -Wno-unused-parameter -CFLAGS_LIB = $(CFLAGS_FAST) -fPIC - -LDFLAGS_LIB = $(LDFLAGS) -shared - -INSTALL ?= install -PREFIX ?= /usr/local -LIBDIR = $(PREFIX)/lib -INCLUDEDIR = $(PREFIX)/include - -ifeq (darwin,$(PLATFORM)) -LDFLAGS_LIB += -Wl,-install_name,$(LIBDIR)/$(SONAME) -else -# TODO(bnoordhuis) The native SunOS linker expects -h rather than -soname... -LDFLAGS_LIB += -Wl,-soname=$(SONAME) -endif - -test: test_g test_fast - $(HELPER) ./test_g$(BINEXT) - $(HELPER) ./test_fast$(BINEXT) - -test_g: http_parser_g.o test_g.o - $(CC) $(CFLAGS_DEBUG) $(LDFLAGS) http_parser_g.o test_g.o -o $@ - -test_g.o: test.c http_parser.h Makefile - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c test.c -o $@ - -http_parser_g.o: http_parser.c http_parser.h Makefile - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c http_parser.c -o $@ - -test_fast: http_parser.o test.o http_parser.h - $(CC) $(CFLAGS_FAST) $(LDFLAGS) http_parser.o test.o -o $@ - -test.o: test.c http_parser.h Makefile - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c test.c -o $@ - -bench: http_parser.o bench.o - $(CC) $(CFLAGS_BENCH) $(LDFLAGS) http_parser.o bench.o -o $@ - -bench.o: bench.c http_parser.h Makefile - $(CC) $(CPPFLAGS_BENCH) $(CFLAGS_BENCH) -c bench.c -o $@ - -http_parser.o: http_parser.c http_parser.h Makefile - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c http_parser.c - -test-run-timed: test_fast - while(true) do time $(HELPER) ./test_fast$(BINEXT) > /dev/null; done - -test-valgrind: test_g - valgrind ./test_g - -libhttp_parser.o: http_parser.c http_parser.h Makefile - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o - -library: libhttp_parser.o - $(CC) $(LDFLAGS_LIB) -o $(LIBNAME) $< - -package: http_parser.o - $(AR) rcs libhttp_parser.a http_parser.o - -url_parser: http_parser.o contrib/url_parser.c - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@ - -url_parser_g: http_parser_g.o contrib/url_parser.c - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o $@ - -parsertrace: http_parser.o contrib/parsertrace.c - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o parsertrace$(BINEXT) - -parsertrace_g: http_parser_g.o contrib/parsertrace.c - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o parsertrace_g$(BINEXT) - -tags: http_parser.c http_parser.h test.c - ctags $^ - -install: library - $(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h - $(INSTALL) -D $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME) - ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME) - ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT) - -install-strip: library - $(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h - $(INSTALL) -D -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME) - ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME) - ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT) - -uninstall: - rm $(DESTDIR)$(INCLUDEDIR)/http_parser.h - rm $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT) - rm $(DESTDIR)$(LIBDIR)/$(SONAME) - rm $(DESTDIR)$(LIBDIR)/$(LIBNAME) - -clean: - rm -f *.o *.a tags test test_fast test_g \ - http_parser.tar libhttp_parser.so.* \ - url_parser url_parser_g parsertrace parsertrace_g \ - *.exe *.exe.so - -contrib/url_parser.c: 
http_parser.h -contrib/parsertrace.c: http_parser.h - -.PHONY: clean package test-run test-run-timed test-valgrind install install-strip uninstall diff --git a/third_party/python/aiohttp/vendor/http-parser/README.md b/third_party/python/aiohttp/vendor/http-parser/README.md deleted file mode 100644 index b265d71715f7..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/README.md +++ /dev/null @@ -1,246 +0,0 @@ -HTTP Parser -=========== - -[![Build Status](https://api.travis-ci.org/nodejs/http-parser.svg?branch=master)](https://travis-ci.org/nodejs/http-parser) - -This is a parser for HTTP messages written in C. It parses both requests and -responses. The parser is designed to be used in performance HTTP -applications. It does not make any syscalls nor allocations, it does not -buffer data, it can be interrupted at anytime. Depending on your -architecture, it only requires about 40 bytes of data per message -stream (in a web server that is per connection). - -Features: - - * No dependencies - * Handles persistent streams (keep-alive). - * Decodes chunked encoding. - * Upgrade support - * Defends against buffer overflow attacks. - -The parser extracts the following information from HTTP messages: - - * Header fields and values - * Content-Length - * Request method - * Response status code - * Transfer-Encoding - * HTTP version - * Request URL - * Message body - - -Usage ------ - -One `http_parser` object is used per TCP connection. Initialize the struct -using `http_parser_init()` and set the callbacks. That might look something -like this for a request parser: -```c -http_parser_settings settings; -settings.on_url = my_url_callback; -settings.on_header_field = my_header_field_callback; -/* ... */ - -http_parser *parser = malloc(sizeof(http_parser)); -http_parser_init(parser, HTTP_REQUEST); -parser->data = my_socket; -``` - -When data is received on the socket execute the parser and check for errors. - -```c -size_t len = 80*1024, nparsed; -char buf[len]; -ssize_t recved; - -recved = recv(fd, buf, len, 0); - -if (recved < 0) { - /* Handle error. */ -} - -/* Start up / continue the parser. - * Note we pass recved==0 to signal that EOF has been received. - */ -nparsed = http_parser_execute(parser, &settings, buf, recved); - -if (parser->upgrade) { - /* handle new protocol */ -} else if (nparsed != recved) { - /* Handle error. Usually just close the connection. */ -} -``` - -`http_parser` needs to know where the end of the stream is. For example, sometimes -servers send responses without Content-Length and expect the client to -consume input (for the body) until EOF. To tell `http_parser` about EOF, give -`0` as the fourth parameter to `http_parser_execute()`. Callbacks and errors -can still be encountered during an EOF, so one must still be prepared -to receive them. - -Scalar valued message information such as `status_code`, `method`, and the -HTTP version are stored in the parser structure. This data is only -temporally stored in `http_parser` and gets reset on each new message. If -this information is needed later, copy it out of the structure during the -`headers_complete` callback. - -The parser decodes the transfer-encoding for both requests and responses -transparently. That is, a chunked encoding is decoded before being sent to -the on_body callback. - - -The Special Problem of Upgrade ------------------------------- - -`http_parser` supports upgrading the connection to a different protocol. 
An -increasingly common example of this is the WebSocket protocol which sends -a request like - - GET /demo HTTP/1.1 - Upgrade: WebSocket - Connection: Upgrade - Host: example.com - Origin: http://example.com - WebSocket-Protocol: sample - -followed by non-HTTP data. - -(See [RFC6455](https://tools.ietf.org/html/rfc6455) for more information the -WebSocket protocol.) - -To support this, the parser will treat this as a normal HTTP message without a -body, issuing both on_headers_complete and on_message_complete callbacks. However -http_parser_execute() will stop parsing at the end of the headers and return. - -The user is expected to check if `parser->upgrade` has been set to 1 after -`http_parser_execute()` returns. Non-HTTP data begins at the buffer supplied -offset by the return value of `http_parser_execute()`. - - -Callbacks ---------- - -During the `http_parser_execute()` call, the callbacks set in -`http_parser_settings` will be executed. The parser maintains state and -never looks behind, so buffering the data is not necessary. If you need to -save certain data for later usage, you can do that from the callbacks. - -There are two types of callbacks: - -* notification `typedef int (*http_cb) (http_parser*);` - Callbacks: on_message_begin, on_headers_complete, on_message_complete. -* data `typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);` - Callbacks: (requests only) on_url, - (common) on_header_field, on_header_value, on_body; - -Callbacks must return 0 on success. Returning a non-zero value indicates -error to the parser, making it exit immediately. - -For cases where it is necessary to pass local information to/from a callback, -the `http_parser` object's `data` field can be used. -An example of such a case is when using threads to handle a socket connection, -parse a request, and then give a response over that socket. By instantiation -of a thread-local struct containing relevant data (e.g. accepted socket, -allocated memory for callbacks to write into, etc), a parser's callbacks are -able to communicate data between the scope of the thread and the scope of the -callback in a threadsafe manner. This allows `http_parser` to be used in -multi-threaded contexts. - -Example: -```c - typedef struct { - socket_t sock; - void* buffer; - int buf_len; - } custom_data_t; - - -int my_url_callback(http_parser* parser, const char *at, size_t length) { - /* access to thread local custom_data_t struct. - Use this access save parsed data for later use into thread local - buffer, or communicate over socket - */ - parser->data; - ... - return 0; -} - -... - -void http_parser_thread(socket_t sock) { - int nparsed = 0; - /* allocate memory for user data */ - custom_data_t *my_data = malloc(sizeof(custom_data_t)); - - /* some information for use by callbacks. - * achieves thread -> callback information flow */ - my_data->sock = sock; - - /* instantiate a thread-local parser */ - http_parser *parser = malloc(sizeof(http_parser)); - http_parser_init(parser, HTTP_REQUEST); /* initialise parser */ - /* this custom data reference is accessible through the reference to the - parser supplied to callback functions */ - parser->data = my_data; - - http_parser_settings settings; /* set up callbacks */ - settings.on_url = my_url_callback; - - /* execute parser */ - nparsed = http_parser_execute(parser, &settings, buf, recved); - - ... - /* parsed information copied from callback. - can now perform action on data copied into thread-local memory from callbacks. 
- achieves callback -> thread information flow */ - my_data->buffer; - ... -} - -``` - -In case you parse HTTP message in chunks (i.e. `read()` request line -from socket, parse, read half headers, parse, etc) your data callbacks -may be called more than once. `http_parser` guarantees that data pointer is only -valid for the lifetime of callback. You can also `read()` into a heap allocated -buffer to avoid copying memory around if this fits your application. - -Reading headers may be a tricky task if you read/parse headers partially. -Basically, you need to remember whether last header callback was field or value -and apply the following logic: - - (on_header_field and on_header_value shortened to on_h_*) - ------------------------ ------------ -------------------------------------------- - | State (prev. callback) | Callback | Description/action | - ------------------------ ------------ -------------------------------------------- - | nothing (first call) | on_h_field | Allocate new buffer and copy callback data | - | | | into it | - ------------------------ ------------ -------------------------------------------- - | value | on_h_field | New header started. | - | | | Copy current name,value buffers to headers | - | | | list and allocate new buffer for new name | - ------------------------ ------------ -------------------------------------------- - | field | on_h_field | Previous name continues. Reallocate name | - | | | buffer and append callback data to it | - ------------------------ ------------ -------------------------------------------- - | field | on_h_value | Value for current header started. Allocate | - | | | new buffer and copy callback data to it | - ------------------------ ------------ -------------------------------------------- - | value | on_h_value | Value continues. Reallocate value buffer | - | | | and append callback data to it | - ------------------------ ------------ -------------------------------------------- - - -Parsing URLs ------------- - -A simplistic zero-copy URL parser is provided as `http_parser_parse_url()`. -Users of this library may wish to use it to parse URLs constructed from -consecutive `on_url` callbacks. - -See examples of reading in headers: - -* [partial example](http://gist.github.com/155877) in C -* [from http-parser tests](http://github.com/joyent/http-parser/blob/37a0ff8/test.c#L403) in C -* [from Node library](http://github.com/joyent/node/blob/842eaf4/src/http.js#L284) in Javascript diff --git a/third_party/python/aiohttp/vendor/http-parser/bench.c b/third_party/python/aiohttp/vendor/http-parser/bench.c deleted file mode 100644 index 678f5556c598..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/bench.c +++ /dev/null @@ -1,128 +0,0 @@ -/* Copyright Fedor Indutny. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
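The header-buffering table above (from the removed http-parser README) describes the field/value state logic only in prose. A minimal sketch of that logic against the callback API is given below; it is an illustration, not code from this tree: the `header_state` struct, the fixed 256-byte buffers, and the helper names are assumptions made for brevity, and a real embedder would grow the buffers as the table suggests instead of truncating.

```c
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

/* Illustrative only: fixed-size buffers instead of the reallocation the
 * README's table describes, so overly long headers are truncated here. */
#define MAX_HDR 256

typedef struct {
    char field[MAX_HDR];
    size_t field_len;
    char value[MAX_HDR];
    size_t value_len;
    int last_was_value; /* state from the table: was the previous callback a value? */
} header_state;

static void append(char *buf, size_t *len, const char *at, size_t n) {
    size_t room = MAX_HDR - 1 - *len;
    if (n > room) n = room;
    memcpy(buf + *len, at, n);
    *len += n;
    buf[*len] = '\0';
}

static int on_h_field(http_parser *p, const char *at, size_t n) {
    header_state *hs = p->data;
    if (hs->last_was_value) {
        /* "value" -> on_h_field: the previous header is complete, start a new one. */
        printf("%s: %s\n", hs->field, hs->value);
        hs->field_len = hs->value_len = 0;
        hs->field[0] = hs->value[0] = '\0';
    }
    /* "field" -> on_h_field: the name simply continues, so keep appending. */
    append(hs->field, &hs->field_len, at, n);
    hs->last_was_value = 0;
    return 0;
}

static int on_h_value(http_parser *p, const char *at, size_t n) {
    header_state *hs = p->data;
    /* "field" or "value" -> on_h_value: append to the current value. */
    append(hs->value, &hs->value_len, at, n);
    hs->last_was_value = 1;
    return 0;
}

static int on_headers_complete(http_parser *p) {
    header_state *hs = p->data;
    if (hs->field_len) /* flush the last buffered header */
        printf("%s: %s\n", hs->field, hs->value);
    return 0;
}

int main(void) {
    const char req[] = "GET / HTTP/1.1\r\nHost: example.com\r\n"
                       "Accept: text/html\r\n\r\n";
    header_state hs = {0};
    http_parser parser;
    http_parser_settings settings;

    memset(&settings, 0, sizeof(settings));
    settings.on_header_field = on_h_field;
    settings.on_header_value = on_h_value;
    settings.on_headers_complete = on_headers_complete;

    http_parser_init(&parser, HTTP_REQUEST);
    parser.data = &hs;
    http_parser_execute(&parser, &settings, req, sizeof(req) - 1);
    return 0;
}
```

Assuming it is saved next to the vendored sources as, say, `headers_example.c` (a hypothetical name), it should build with something like `cc -I. http_parser.c headers_example.c`.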
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ -#include "http_parser.h" -#include -#include -#include -#include -#include - -/* 8 gb */ -static const int64_t kBytes = 8LL << 30; - -static const char data[] = - "POST /joyent/http-parser HTTP/1.1\r\n" - "Host: github.com\r\n" - "DNT: 1\r\n" - "Accept-Encoding: gzip, deflate, sdch\r\n" - "Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4\r\n" - "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) " - "AppleWebKit/537.36 (KHTML, like Gecko) " - "Chrome/39.0.2171.65 Safari/537.36\r\n" - "Accept: text/html,application/xhtml+xml,application/xml;q=0.9," - "image/webp,*/*;q=0.8\r\n" - "Referer: https://github.com/joyent/http-parser\r\n" - "Connection: keep-alive\r\n" - "Transfer-Encoding: chunked\r\n" - "Cache-Control: max-age=0\r\n\r\nb\r\nhello world\r\n0\r\n"; -static const size_t data_len = sizeof(data) - 1; - -static int on_info(http_parser* p) { - return 0; -} - - -static int on_data(http_parser* p, const char *at, size_t length) { - return 0; -} - -static http_parser_settings settings = { - .on_message_begin = on_info, - .on_headers_complete = on_info, - .on_message_complete = on_info, - .on_header_field = on_data, - .on_header_value = on_data, - .on_url = on_data, - .on_status = on_data, - .on_body = on_data -}; - -int bench(int iter_count, int silent) { - struct http_parser parser; - int i; - int err; - struct timeval start; - struct timeval end; - - if (!silent) { - err = gettimeofday(&start, NULL); - assert(err == 0); - } - - fprintf(stderr, "req_len=%d\n", (int) data_len); - for (i = 0; i < iter_count; i++) { - size_t parsed; - http_parser_init(&parser, HTTP_REQUEST); - - parsed = http_parser_execute(&parser, &settings, data, data_len); - assert(parsed == data_len); - } - - if (!silent) { - double elapsed; - double bw; - double total; - - err = gettimeofday(&end, NULL); - assert(err == 0); - - fprintf(stdout, "Benchmark result:\n"); - - elapsed = (double) (end.tv_sec - start.tv_sec) + - (end.tv_usec - start.tv_usec) * 1e-6f; - - total = (double) iter_count * data_len; - bw = (double) total / elapsed; - - fprintf(stdout, "%.2f mb | %.2f mb/s | %.2f req/sec | %.2f s\n", - (double) total / (1024 * 1024), - bw / (1024 * 1024), - (double) iter_count / elapsed, - elapsed); - - fflush(stdout); - } - - return 0; -} - -int main(int argc, char** argv) { - int64_t iterations; - - iterations = kBytes / (int64_t) data_len; - if (argc == 2 && strcmp(argv[1], "infinite") == 0) { - for (;;) - bench(iterations, 1); - return 0; - } else { - return bench(iterations, 0); - } -} diff --git a/third_party/python/aiohttp/vendor/http-parser/contrib/parsertrace.c b/third_party/python/aiohttp/vendor/http-parser/contrib/parsertrace.c deleted file mode 100644 index 3daa7f46a193..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/contrib/parsertrace.c +++ /dev/null @@ -1,157 +0,0 @@ -/* Copyright Joyent, Inc. and other Node contributors. 
- * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ - -/* Dump what the parser finds to stdout as it happen */ - -#include "http_parser.h" -#include -#include -#include - -int on_message_begin(http_parser* _) { - (void)_; - printf("\n***MESSAGE BEGIN***\n\n"); - return 0; -} - -int on_headers_complete(http_parser* _) { - (void)_; - printf("\n***HEADERS COMPLETE***\n\n"); - return 0; -} - -int on_message_complete(http_parser* _) { - (void)_; - printf("\n***MESSAGE COMPLETE***\n\n"); - return 0; -} - -int on_url(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Url: %.*s\n", (int)length, at); - return 0; -} - -int on_header_field(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Header field: %.*s\n", (int)length, at); - return 0; -} - -int on_header_value(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Header value: %.*s\n", (int)length, at); - return 0; -} - -int on_body(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Body: %.*s\n", (int)length, at); - return 0; -} - -void usage(const char* name) { - fprintf(stderr, - "Usage: %s $type $filename\n" - " type: -x, where x is one of {r,b,q}\n" - " parses file as a Response, reQuest, or Both\n", - name); - exit(EXIT_FAILURE); -} - -int main(int argc, char* argv[]) { - enum http_parser_type file_type; - - if (argc != 3) { - usage(argv[0]); - } - - char* type = argv[1]; - if (type[0] != '-') { - usage(argv[0]); - } - - switch (type[1]) { - /* in the case of "-", type[1] will be NUL */ - case 'r': - file_type = HTTP_RESPONSE; - break; - case 'q': - file_type = HTTP_REQUEST; - break; - case 'b': - file_type = HTTP_BOTH; - break; - default: - usage(argv[0]); - } - - char* filename = argv[2]; - FILE* file = fopen(filename, "r"); - if (file == NULL) { - perror("fopen"); - goto fail; - } - - fseek(file, 0, SEEK_END); - long file_length = ftell(file); - if (file_length == -1) { - perror("ftell"); - goto fail; - } - fseek(file, 0, SEEK_SET); - - char* data = malloc(file_length); - if (fread(data, 1, file_length, file) != (size_t)file_length) { - fprintf(stderr, "couldn't read entire file\n"); - free(data); - goto fail; - } - - http_parser_settings settings; - memset(&settings, 0, sizeof(settings)); - settings.on_message_begin = on_message_begin; - settings.on_url = on_url; - settings.on_header_field = on_header_field; - settings.on_header_value = on_header_value; - settings.on_headers_complete = on_headers_complete; - settings.on_body = on_body; 
- settings.on_message_complete = on_message_complete; - - http_parser parser; - http_parser_init(&parser, file_type); - size_t nparsed = http_parser_execute(&parser, &settings, data, file_length); - free(data); - - if (nparsed != (size_t)file_length) { - fprintf(stderr, - "Error: %s (%s)\n", - http_errno_description(HTTP_PARSER_ERRNO(&parser)), - http_errno_name(HTTP_PARSER_ERRNO(&parser))); - goto fail; - } - - return EXIT_SUCCESS; - -fail: - fclose(file); - return EXIT_FAILURE; -} diff --git a/third_party/python/aiohttp/vendor/http-parser/contrib/url_parser.c b/third_party/python/aiohttp/vendor/http-parser/contrib/url_parser.c deleted file mode 100644 index f235bed9e483..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/contrib/url_parser.c +++ /dev/null @@ -1,47 +0,0 @@ -#include "http_parser.h" -#include -#include - -void -dump_url (const char *url, const struct http_parser_url *u) -{ - unsigned int i; - - printf("\tfield_set: 0x%x, port: %u\n", u->field_set, u->port); - for (i = 0; i < UF_MAX; i++) { - if ((u->field_set & (1 << i)) == 0) { - printf("\tfield_data[%u]: unset\n", i); - continue; - } - - printf("\tfield_data[%u]: off: %u, len: %u, part: %.*s\n", - i, - u->field_data[i].off, - u->field_data[i].len, - u->field_data[i].len, - url + u->field_data[i].off); - } -} - -int main(int argc, char ** argv) { - struct http_parser_url u; - int len, connect, result; - - if (argc != 3) { - printf("Syntax : %s connect|get url\n", argv[0]); - return 1; - } - len = strlen(argv[2]); - connect = strcmp("connect", argv[1]) == 0 ? 1 : 0; - printf("Parsing %s, connect %d\n", argv[2], connect); - - http_parser_url_init(&u); - result = http_parser_parse_url(argv[2], len, connect, &u); - if (result != 0) { - printf("Parse error : %d\n", result); - return result; - } - printf("Parse ok, result : \n"); - dump_url(argv[2], &u); - return 0; -} diff --git a/third_party/python/aiohttp/vendor/http-parser/http_parser.c b/third_party/python/aiohttp/vendor/http-parser/http_parser.c deleted file mode 100644 index 95ff42f783d9..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/http_parser.c +++ /dev/null @@ -1,2568 +0,0 @@ -/* Copyright Joyent, Inc. and other Node contributors. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. 
- */ -#include "http_parser.h" -#include -#include -#include -#include -#include - -static uint32_t max_header_size = HTTP_MAX_HEADER_SIZE; - -#ifndef ULLONG_MAX -# define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */ -#endif - -#ifndef MIN -# define MIN(a,b) ((a) < (b) ? (a) : (b)) -#endif - -#ifndef ARRAY_SIZE -# define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) -#endif - -#ifndef BIT_AT -# define BIT_AT(a, i) \ - (!!((unsigned int) (a)[(unsigned int) (i) >> 3] & \ - (1 << ((unsigned int) (i) & 7)))) -#endif - -#ifndef ELEM_AT -# define ELEM_AT(a, i, v) ((unsigned int) (i) < ARRAY_SIZE(a) ? (a)[(i)] : (v)) -#endif - -#define SET_ERRNO(e) \ -do { \ - parser->nread = nread; \ - parser->http_errno = (e); \ -} while(0) - -#define CURRENT_STATE() p_state -#define UPDATE_STATE(V) p_state = (enum state) (V); -#define RETURN(V) \ -do { \ - parser->nread = nread; \ - parser->state = CURRENT_STATE(); \ - return (V); \ -} while (0); -#define REEXECUTE() \ - goto reexecute; \ - - -#ifdef __GNUC__ -# define LIKELY(X) __builtin_expect(!!(X), 1) -# define UNLIKELY(X) __builtin_expect(!!(X), 0) -#else -# define LIKELY(X) (X) -# define UNLIKELY(X) (X) -#endif - - -/* Run the notify callback FOR, returning ER if it fails */ -#define CALLBACK_NOTIFY_(FOR, ER) \ -do { \ - assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ - \ - if (LIKELY(settings->on_##FOR)) { \ - parser->state = CURRENT_STATE(); \ - if (UNLIKELY(0 != settings->on_##FOR(parser))) { \ - SET_ERRNO(HPE_CB_##FOR); \ - } \ - UPDATE_STATE(parser->state); \ - \ - /* We either errored above or got paused; get out */ \ - if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ - return (ER); \ - } \ - } \ -} while (0) - -/* Run the notify callback FOR and consume the current byte */ -#define CALLBACK_NOTIFY(FOR) CALLBACK_NOTIFY_(FOR, p - data + 1) - -/* Run the notify callback FOR and don't consume the current byte */ -#define CALLBACK_NOTIFY_NOADVANCE(FOR) CALLBACK_NOTIFY_(FOR, p - data) - -/* Run data callback FOR with LEN bytes, returning ER if it fails */ -#define CALLBACK_DATA_(FOR, LEN, ER) \ -do { \ - assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ - \ - if (FOR##_mark) { \ - if (LIKELY(settings->on_##FOR)) { \ - parser->state = CURRENT_STATE(); \ - if (UNLIKELY(0 != \ - settings->on_##FOR(parser, FOR##_mark, (LEN)))) { \ - SET_ERRNO(HPE_CB_##FOR); \ - } \ - UPDATE_STATE(parser->state); \ - \ - /* We either errored above or got paused; get out */ \ - if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ - return (ER); \ - } \ - } \ - FOR##_mark = NULL; \ - } \ -} while (0) - -/* Run the data callback FOR and consume the current byte */ -#define CALLBACK_DATA(FOR) \ - CALLBACK_DATA_(FOR, p - FOR##_mark, p - data + 1) - -/* Run the data callback FOR and don't consume the current byte */ -#define CALLBACK_DATA_NOADVANCE(FOR) \ - CALLBACK_DATA_(FOR, p - FOR##_mark, p - data) - -/* Set the mark FOR; non-destructive if mark is already set */ -#define MARK(FOR) \ -do { \ - if (!FOR##_mark) { \ - FOR##_mark = p; \ - } \ -} while (0) - -/* Don't allow the total size of the HTTP headers (including the status - * line) to exceed max_header_size. This check is here to protect - * embedders against denial-of-service attacks where the attacker feeds - * us a never-ending header that the embedder keeps buffering. - * - * This check is arguably the responsibility of embedders but we're doing - * it on the embedder's behalf because most won't bother and this way we - * make the web a little safer. 
max_header_size is still far bigger - * than any reasonable request or response so this should never affect - * day-to-day operation. - */ -#define COUNT_HEADER_SIZE(V) \ -do { \ - nread += (uint32_t)(V); \ - if (UNLIKELY(nread > max_header_size)) { \ - SET_ERRNO(HPE_HEADER_OVERFLOW); \ - goto error; \ - } \ -} while (0) - - -#define PROXY_CONNECTION "proxy-connection" -#define CONNECTION "connection" -#define CONTENT_LENGTH "content-length" -#define TRANSFER_ENCODING "transfer-encoding" -#define UPGRADE "upgrade" -#define CHUNKED "chunked" -#define KEEP_ALIVE "keep-alive" -#define CLOSE "close" - - -static const char *method_strings[] = - { -#define XX(num, name, string) #string, - HTTP_METHOD_MAP(XX) -#undef XX - }; - - -/* Tokens as defined by rfc 2616. Also lowercases them. - * token = 1* - * separators = "(" | ")" | "<" | ">" | "@" - * | "," | ";" | ":" | "\" | <"> - * | "/" | "[" | "]" | "?" | "=" - * | "{" | "}" | SP | HT - */ -static const char tokens[256] = { -/* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 32 sp 33 ! 34 " 35 # 36 $ 37 % 38 & 39 ' */ - ' ', '!', 0, '#', '$', '%', '&', '\'', -/* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 47 / */ - 0, 0, '*', '+', 0, '-', '.', 0, -/* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ - '0', '1', '2', '3', '4', '5', '6', '7', -/* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ - '8', '9', 0, 0, 0, 0, 0, 0, -/* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ - 0, 'a', 'b', 'c', 'd', 'e', 'f', 'g', -/* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ - 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', -/* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ - 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', -/* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ - 'x', 'y', 'z', 0, 0, 0, '^', '_', -/* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ - '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', -/* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ - 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', -/* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ - 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', -/* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ - 'x', 'y', 'z', 0, '|', 0, '~', 0 }; - - -static const int8_t unhex[256] = - {-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - , 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1,-1,-1,-1,-1 - ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - }; - - -#if HTTP_PARSER_STRICT -# define T(v) 0 -#else -# define T(v) v -#endif - - -static const uint8_t normal_url_char[32] = { -/* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ - 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, -/* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ - 0 | T(2) | 0 | 0 | T(16) | 0 | 0 | 0, -/* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ - 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, -/* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ - 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, -/* 32 sp 33 ! 34 " 35 # 36 $ 37 % 38 & 39 ' */ - 0 | 2 | 4 | 0 | 16 | 32 | 64 | 128, -/* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 
47 / */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, -/* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, }; - -#undef T - -enum state - { s_dead = 1 /* important that this is > 0 */ - - , s_start_req_or_res - , s_res_or_resp_H - , s_start_res - , s_res_H - , s_res_HT - , s_res_HTT - , s_res_HTTP - , s_res_http_major - , s_res_http_dot - , s_res_http_minor - , s_res_http_end - , s_res_first_status_code - , s_res_status_code - , s_res_status_start - , s_res_status - , s_res_line_almost_done - - , s_start_req - - , s_req_method - , s_req_spaces_before_url - , s_req_schema - , s_req_schema_slash - , s_req_schema_slash_slash - , s_req_server_start - , s_req_server - , s_req_server_with_at - , s_req_path - , s_req_query_string_start - , s_req_query_string - , s_req_fragment_start - , s_req_fragment - , s_req_http_start - , s_req_http_H - , s_req_http_HT - , s_req_http_HTT - , s_req_http_HTTP - , s_req_http_I - , s_req_http_IC - , s_req_http_major - , s_req_http_dot - , s_req_http_minor - , s_req_http_end - , s_req_line_almost_done - - , s_header_field_start - , s_header_field - , s_header_value_discard_ws - , s_header_value_discard_ws_almost_done - , s_header_value_discard_lws - , s_header_value_start - , s_header_value - , s_header_value_lws - - , s_header_almost_done - - , s_chunk_size_start - , s_chunk_size - , s_chunk_parameters - , s_chunk_size_almost_done - - , s_headers_almost_done - , s_headers_done - - /* Important: 's_headers_done' must be the last 'header' state. All - * states beyond this must be 'body' states. It is used for overflow - * checking. See the PARSING_HEADER() macro. 
- */ - - , s_chunk_data - , s_chunk_data_almost_done - , s_chunk_data_done - - , s_body_identity - , s_body_identity_eof - - , s_message_done - }; - - -#define PARSING_HEADER(state) (state <= s_headers_done) - - -enum header_states - { h_general = 0 - , h_C - , h_CO - , h_CON - - , h_matching_connection - , h_matching_proxy_connection - , h_matching_content_length - , h_matching_transfer_encoding - , h_matching_upgrade - - , h_connection - , h_content_length - , h_content_length_num - , h_content_length_ws - , h_transfer_encoding - , h_upgrade - - , h_matching_transfer_encoding_token_start - , h_matching_transfer_encoding_chunked - , h_matching_transfer_encoding_token - - , h_matching_connection_token_start - , h_matching_connection_keep_alive - , h_matching_connection_close - , h_matching_connection_upgrade - , h_matching_connection_token - - , h_transfer_encoding_chunked - , h_connection_keep_alive - , h_connection_close - , h_connection_upgrade - }; - -enum http_host_state - { - s_http_host_dead = 1 - , s_http_userinfo_start - , s_http_userinfo - , s_http_host_start - , s_http_host_v6_start - , s_http_host - , s_http_host_v6 - , s_http_host_v6_end - , s_http_host_v6_zone_start - , s_http_host_v6_zone - , s_http_host_port_start - , s_http_host_port -}; - -/* Macros for character classes; depends on strict-mode */ -#define CR '\r' -#define LF '\n' -#define LOWER(c) (unsigned char)(c | 0x20) -#define IS_ALPHA(c) (LOWER(c) >= 'a' && LOWER(c) <= 'z') -#define IS_NUM(c) ((c) >= '0' && (c) <= '9') -#define IS_ALPHANUM(c) (IS_ALPHA(c) || IS_NUM(c)) -#define IS_HEX(c) (IS_NUM(c) || (LOWER(c) >= 'a' && LOWER(c) <= 'f')) -#define IS_MARK(c) ((c) == '-' || (c) == '_' || (c) == '.' || \ - (c) == '!' || (c) == '~' || (c) == '*' || (c) == '\'' || (c) == '(' || \ - (c) == ')') -#define IS_USERINFO_CHAR(c) (IS_ALPHANUM(c) || IS_MARK(c) || (c) == '%' || \ - (c) == ';' || (c) == ':' || (c) == '&' || (c) == '=' || (c) == '+' || \ - (c) == '$' || (c) == ',') - -#define STRICT_TOKEN(c) ((c == ' ') ? 0 : tokens[(unsigned char)c]) - -#if HTTP_PARSER_STRICT -#define TOKEN(c) STRICT_TOKEN(c) -#define IS_URL_CHAR(c) (BIT_AT(normal_url_char, (unsigned char)c)) -#define IS_HOST_CHAR(c) (IS_ALPHANUM(c) || (c) == '.' || (c) == '-') -#else -#define TOKEN(c) tokens[(unsigned char)c] -#define IS_URL_CHAR(c) \ - (BIT_AT(normal_url_char, (unsigned char)c) || ((c) & 0x80)) -#define IS_HOST_CHAR(c) \ - (IS_ALPHANUM(c) || (c) == '.' || (c) == '-' || (c) == '_') -#endif - -/** - * Verify that a char is a valid visible (printable) US-ASCII - * character or %x80-FF - **/ -#define IS_HEADER_CHAR(ch) \ - (ch == CR || ch == LF || ch == 9 || ((unsigned char)ch > 31 && ch != 127)) - -#define start_state (parser->type == HTTP_REQUEST ? s_start_req : s_start_res) - - -#if HTTP_PARSER_STRICT -# define STRICT_CHECK(cond) \ -do { \ - if (cond) { \ - SET_ERRNO(HPE_STRICT); \ - goto error; \ - } \ -} while (0) -# define NEW_MESSAGE() (http_should_keep_alive(parser) ? start_state : s_dead) -#else -# define STRICT_CHECK(cond) -# define NEW_MESSAGE() start_state -#endif - - -/* Map errno values to strings for human-readable output */ -#define HTTP_STRERROR_GEN(n, s) { "HPE_" #n, s }, -static struct { - const char *name; - const char *description; -} http_strerror_tab[] = { - HTTP_ERRNO_MAP(HTTP_STRERROR_GEN) -}; -#undef HTTP_STRERROR_GEN - -int http_message_needs_eof(const http_parser *parser); - -/* Our URL parser. 
- * - * This is designed to be shared by http_parser_execute() for URL validation, - * hence it has a state transition + byte-for-byte interface. In addition, it - * is meant to be embedded in http_parser_parse_url(), which does the dirty - * work of turning state transitions URL components for its API. - * - * This function should only be invoked with non-space characters. It is - * assumed that the caller cares about (and can detect) the transition between - * URL and non-URL states by looking for these. - */ -static enum state -parse_url_char(enum state s, const char ch) -{ - if (ch == ' ' || ch == '\r' || ch == '\n') { - return s_dead; - } - -#if HTTP_PARSER_STRICT - if (ch == '\t' || ch == '\f') { - return s_dead; - } -#endif - - switch (s) { - case s_req_spaces_before_url: - /* Proxied requests are followed by scheme of an absolute URI (alpha). - * All methods except CONNECT are followed by '/' or '*'. - */ - - if (ch == '/' || ch == '*') { - return s_req_path; - } - - if (IS_ALPHA(ch)) { - return s_req_schema; - } - - break; - - case s_req_schema: - if (IS_ALPHA(ch)) { - return s; - } - - if (ch == ':') { - return s_req_schema_slash; - } - - break; - - case s_req_schema_slash: - if (ch == '/') { - return s_req_schema_slash_slash; - } - - break; - - case s_req_schema_slash_slash: - if (ch == '/') { - return s_req_server_start; - } - - break; - - case s_req_server_with_at: - if (ch == '@') { - return s_dead; - } - - /* fall through */ - case s_req_server_start: - case s_req_server: - if (ch == '/') { - return s_req_path; - } - - if (ch == '?') { - return s_req_query_string_start; - } - - if (ch == '@') { - return s_req_server_with_at; - } - - if (IS_USERINFO_CHAR(ch) || ch == '[' || ch == ']') { - return s_req_server; - } - - break; - - case s_req_path: - if (IS_URL_CHAR(ch)) { - return s; - } - - switch (ch) { - case '?': - return s_req_query_string_start; - - case '#': - return s_req_fragment_start; - } - - break; - - case s_req_query_string_start: - case s_req_query_string: - if (IS_URL_CHAR(ch)) { - return s_req_query_string; - } - - switch (ch) { - case '?': - /* allow extra '?' in query string */ - return s_req_query_string; - - case '#': - return s_req_fragment_start; - } - - break; - - case s_req_fragment_start: - if (IS_URL_CHAR(ch)) { - return s_req_fragment; - } - - switch (ch) { - case '?': - return s_req_fragment; - - case '#': - return s; - } - - break; - - case s_req_fragment: - if (IS_URL_CHAR(ch)) { - return s; - } - - switch (ch) { - case '?': - case '#': - return s; - } - - break; - - default: - break; - } - - /* We should never fall out of the switch above unless there's an error */ - return s_dead; -} - -size_t http_parser_execute (http_parser *parser, - const http_parser_settings *settings, - const char *data, - size_t len) -{ - char c, ch; - int8_t unhex_val; - const char *p = data; - const char *header_field_mark = 0; - const char *header_value_mark = 0; - const char *url_mark = 0; - const char *body_mark = 0; - const char *status_mark = 0; - enum state p_state = (enum state) parser->state; - const unsigned int lenient = parser->lenient_http_headers; - uint32_t nread = parser->nread; - - /* We're in an error state. Don't bother doing anything. */ - if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { - return 0; - } - - if (len == 0) { - switch (CURRENT_STATE()) { - case s_body_identity_eof: - /* Use of CALLBACK_NOTIFY() here would erroneously return 1 byte read if - * we got paused. 
- */ - CALLBACK_NOTIFY_NOADVANCE(message_complete); - return 0; - - case s_dead: - case s_start_req_or_res: - case s_start_res: - case s_start_req: - return 0; - - default: - SET_ERRNO(HPE_INVALID_EOF_STATE); - return 1; - } - } - - - if (CURRENT_STATE() == s_header_field) - header_field_mark = data; - if (CURRENT_STATE() == s_header_value) - header_value_mark = data; - switch (CURRENT_STATE()) { - case s_req_path: - case s_req_schema: - case s_req_schema_slash: - case s_req_schema_slash_slash: - case s_req_server_start: - case s_req_server: - case s_req_server_with_at: - case s_req_query_string_start: - case s_req_query_string: - case s_req_fragment_start: - case s_req_fragment: - url_mark = data; - break; - case s_res_status: - status_mark = data; - break; - default: - break; - } - - for (p=data; p != data + len; p++) { - ch = *p; - - if (PARSING_HEADER(CURRENT_STATE())) - COUNT_HEADER_SIZE(1); - -reexecute: - switch (CURRENT_STATE()) { - - case s_dead: - /* this state is used after a 'Connection: close' message - * the parser will error out if it reads another message - */ - if (LIKELY(ch == CR || ch == LF)) - break; - - SET_ERRNO(HPE_CLOSED_CONNECTION); - goto error; - - case s_start_req_or_res: - { - if (ch == CR || ch == LF) - break; - parser->flags = 0; - parser->extra_flags = 0; - parser->content_length = ULLONG_MAX; - - if (ch == 'H') { - UPDATE_STATE(s_res_or_resp_H); - - CALLBACK_NOTIFY(message_begin); - } else { - parser->type = HTTP_REQUEST; - UPDATE_STATE(s_start_req); - REEXECUTE(); - } - - break; - } - - case s_res_or_resp_H: - if (ch == 'T') { - parser->type = HTTP_RESPONSE; - UPDATE_STATE(s_res_HT); - } else { - if (UNLIKELY(ch != 'E')) { - SET_ERRNO(HPE_INVALID_CONSTANT); - goto error; - } - - parser->type = HTTP_REQUEST; - parser->method = HTTP_HEAD; - parser->index = 2; - UPDATE_STATE(s_req_method); - } - break; - - case s_start_res: - { - if (ch == CR || ch == LF) - break; - parser->flags = 0; - parser->extra_flags = 0; - parser->content_length = ULLONG_MAX; - - if (ch == 'H') { - UPDATE_STATE(s_res_H); - } else { - SET_ERRNO(HPE_INVALID_CONSTANT); - goto error; - } - - CALLBACK_NOTIFY(message_begin); - break; - } - - case s_res_H: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_res_HT); - break; - - case s_res_HT: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_res_HTT); - break; - - case s_res_HTT: - STRICT_CHECK(ch != 'P'); - UPDATE_STATE(s_res_HTTP); - break; - - case s_res_HTTP: - STRICT_CHECK(ch != '/'); - UPDATE_STATE(s_res_http_major); - break; - - case s_res_http_major: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_major = ch - '0'; - UPDATE_STATE(s_res_http_dot); - break; - - case s_res_http_dot: - { - if (UNLIKELY(ch != '.')) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - UPDATE_STATE(s_res_http_minor); - break; - } - - case s_res_http_minor: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_minor = ch - '0'; - UPDATE_STATE(s_res_http_end); - break; - - case s_res_http_end: - { - if (UNLIKELY(ch != ' ')) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - UPDATE_STATE(s_res_first_status_code); - break; - } - - case s_res_first_status_code: - { - if (!IS_NUM(ch)) { - if (ch == ' ') { - break; - } - - SET_ERRNO(HPE_INVALID_STATUS); - goto error; - } - parser->status_code = ch - '0'; - UPDATE_STATE(s_res_status_code); - break; - } - - case s_res_status_code: - { - if (!IS_NUM(ch)) { - switch (ch) { - case ' ': - UPDATE_STATE(s_res_status_start); - 
break; - case CR: - case LF: - UPDATE_STATE(s_res_status_start); - REEXECUTE(); - break; - default: - SET_ERRNO(HPE_INVALID_STATUS); - goto error; - } - break; - } - - parser->status_code *= 10; - parser->status_code += ch - '0'; - - if (UNLIKELY(parser->status_code > 999)) { - SET_ERRNO(HPE_INVALID_STATUS); - goto error; - } - - break; - } - - case s_res_status_start: - { - MARK(status); - UPDATE_STATE(s_res_status); - parser->index = 0; - - if (ch == CR || ch == LF) - REEXECUTE(); - - break; - } - - case s_res_status: - if (ch == CR) { - UPDATE_STATE(s_res_line_almost_done); - CALLBACK_DATA(status); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_field_start); - CALLBACK_DATA(status); - break; - } - - break; - - case s_res_line_almost_done: - STRICT_CHECK(ch != LF); - UPDATE_STATE(s_header_field_start); - break; - - case s_start_req: - { - if (ch == CR || ch == LF) - break; - parser->flags = 0; - parser->extra_flags = 0; - parser->content_length = ULLONG_MAX; - - if (UNLIKELY(!IS_ALPHA(ch))) { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - - parser->method = (enum http_method) 0; - parser->index = 1; - switch (ch) { - case 'A': parser->method = HTTP_ACL; break; - case 'B': parser->method = HTTP_BIND; break; - case 'C': parser->method = HTTP_CONNECT; /* or COPY, CHECKOUT */ break; - case 'D': parser->method = HTTP_DELETE; break; - case 'G': parser->method = HTTP_GET; break; - case 'H': parser->method = HTTP_HEAD; break; - case 'L': parser->method = HTTP_LOCK; /* or LINK */ break; - case 'M': parser->method = HTTP_MKCOL; /* or MOVE, MKACTIVITY, MERGE, M-SEARCH, MKCALENDAR */ break; - case 'N': parser->method = HTTP_NOTIFY; break; - case 'O': parser->method = HTTP_OPTIONS; break; - case 'P': parser->method = HTTP_POST; - /* or PROPFIND|PROPPATCH|PUT|PATCH|PURGE */ - break; - case 'R': parser->method = HTTP_REPORT; /* or REBIND */ break; - case 'S': parser->method = HTTP_SUBSCRIBE; /* or SEARCH, SOURCE */ break; - case 'T': parser->method = HTTP_TRACE; break; - case 'U': parser->method = HTTP_UNLOCK; /* or UNSUBSCRIBE, UNBIND, UNLINK */ break; - default: - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - UPDATE_STATE(s_req_method); - - CALLBACK_NOTIFY(message_begin); - - break; - } - - case s_req_method: - { - const char *matcher; - if (UNLIKELY(ch == '\0')) { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - - matcher = method_strings[parser->method]; - if (ch == ' ' && matcher[parser->index] == '\0') { - UPDATE_STATE(s_req_spaces_before_url); - } else if (ch == matcher[parser->index]) { - ; /* nada */ - } else if ((ch >= 'A' && ch <= 'Z') || ch == '-') { - - switch (parser->method << 16 | parser->index << 8 | ch) { -#define XX(meth, pos, ch, new_meth) \ - case (HTTP_##meth << 16 | pos << 8 | ch): \ - parser->method = HTTP_##new_meth; break; - - XX(POST, 1, 'U', PUT) - XX(POST, 1, 'A', PATCH) - XX(POST, 1, 'R', PROPFIND) - XX(PUT, 2, 'R', PURGE) - XX(CONNECT, 1, 'H', CHECKOUT) - XX(CONNECT, 2, 'P', COPY) - XX(MKCOL, 1, 'O', MOVE) - XX(MKCOL, 1, 'E', MERGE) - XX(MKCOL, 1, '-', MSEARCH) - XX(MKCOL, 2, 'A', MKACTIVITY) - XX(MKCOL, 3, 'A', MKCALENDAR) - XX(SUBSCRIBE, 1, 'E', SEARCH) - XX(SUBSCRIBE, 1, 'O', SOURCE) - XX(REPORT, 2, 'B', REBIND) - XX(PROPFIND, 4, 'P', PROPPATCH) - XX(LOCK, 1, 'I', LINK) - XX(UNLOCK, 2, 'S', UNSUBSCRIBE) - XX(UNLOCK, 2, 'B', UNBIND) - XX(UNLOCK, 3, 'I', UNLINK) -#undef XX - default: - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - - ++parser->index; - break; - } - - case 
s_req_spaces_before_url: - { - if (ch == ' ') break; - - MARK(url); - if (parser->method == HTTP_CONNECT) { - UPDATE_STATE(s_req_server_start); - } - - UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); - if (UNLIKELY(CURRENT_STATE() == s_dead)) { - SET_ERRNO(HPE_INVALID_URL); - goto error; - } - - break; - } - - case s_req_schema: - case s_req_schema_slash: - case s_req_schema_slash_slash: - case s_req_server_start: - { - switch (ch) { - /* No whitespace allowed here */ - case ' ': - case CR: - case LF: - SET_ERRNO(HPE_INVALID_URL); - goto error; - default: - UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); - if (UNLIKELY(CURRENT_STATE() == s_dead)) { - SET_ERRNO(HPE_INVALID_URL); - goto error; - } - } - - break; - } - - case s_req_server: - case s_req_server_with_at: - case s_req_path: - case s_req_query_string_start: - case s_req_query_string: - case s_req_fragment_start: - case s_req_fragment: - { - switch (ch) { - case ' ': - UPDATE_STATE(s_req_http_start); - CALLBACK_DATA(url); - break; - case CR: - case LF: - parser->http_major = 0; - parser->http_minor = 9; - UPDATE_STATE((ch == CR) ? - s_req_line_almost_done : - s_header_field_start); - CALLBACK_DATA(url); - break; - default: - UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); - if (UNLIKELY(CURRENT_STATE() == s_dead)) { - SET_ERRNO(HPE_INVALID_URL); - goto error; - } - } - break; - } - - case s_req_http_start: - switch (ch) { - case ' ': - break; - case 'H': - UPDATE_STATE(s_req_http_H); - break; - case 'I': - if (parser->method == HTTP_SOURCE) { - UPDATE_STATE(s_req_http_I); - break; - } - /* fall through */ - default: - SET_ERRNO(HPE_INVALID_CONSTANT); - goto error; - } - break; - - case s_req_http_H: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_req_http_HT); - break; - - case s_req_http_HT: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_req_http_HTT); - break; - - case s_req_http_HTT: - STRICT_CHECK(ch != 'P'); - UPDATE_STATE(s_req_http_HTTP); - break; - - case s_req_http_I: - STRICT_CHECK(ch != 'C'); - UPDATE_STATE(s_req_http_IC); - break; - - case s_req_http_IC: - STRICT_CHECK(ch != 'E'); - UPDATE_STATE(s_req_http_HTTP); /* Treat "ICE" as "HTTP". 
*/ - break; - - case s_req_http_HTTP: - STRICT_CHECK(ch != '/'); - UPDATE_STATE(s_req_http_major); - break; - - case s_req_http_major: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_major = ch - '0'; - UPDATE_STATE(s_req_http_dot); - break; - - case s_req_http_dot: - { - if (UNLIKELY(ch != '.')) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - UPDATE_STATE(s_req_http_minor); - break; - } - - case s_req_http_minor: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_minor = ch - '0'; - UPDATE_STATE(s_req_http_end); - break; - - case s_req_http_end: - { - if (ch == CR) { - UPDATE_STATE(s_req_line_almost_done); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_field_start); - break; - } - - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - break; - } - - /* end of request line */ - case s_req_line_almost_done: - { - if (UNLIKELY(ch != LF)) { - SET_ERRNO(HPE_LF_EXPECTED); - goto error; - } - - UPDATE_STATE(s_header_field_start); - break; - } - - case s_header_field_start: - { - if (ch == CR) { - UPDATE_STATE(s_headers_almost_done); - break; - } - - if (ch == LF) { - /* they might be just sending \n instead of \r\n so this would be - * the second \n to denote the end of headers*/ - UPDATE_STATE(s_headers_almost_done); - REEXECUTE(); - } - - c = TOKEN(ch); - - if (UNLIKELY(!c)) { - SET_ERRNO(HPE_INVALID_HEADER_TOKEN); - goto error; - } - - MARK(header_field); - - parser->index = 0; - UPDATE_STATE(s_header_field); - - switch (c) { - case 'c': - parser->header_state = h_C; - break; - - case 'p': - parser->header_state = h_matching_proxy_connection; - break; - - case 't': - parser->header_state = h_matching_transfer_encoding; - break; - - case 'u': - parser->header_state = h_matching_upgrade; - break; - - default: - parser->header_state = h_general; - break; - } - break; - } - - case s_header_field: - { - const char* start = p; - for (; p != data + len; p++) { - ch = *p; - c = TOKEN(ch); - - if (!c) - break; - - switch (parser->header_state) { - case h_general: { - size_t left = data + len - p; - const char* pe = p + MIN(left, max_header_size); - while (p+1 < pe && TOKEN(p[1])) { - p++; - } - break; - } - - case h_C: - parser->index++; - parser->header_state = (c == 'o' ? h_CO : h_general); - break; - - case h_CO: - parser->index++; - parser->header_state = (c == 'n' ? 
h_CON : h_general); - break; - - case h_CON: - parser->index++; - switch (c) { - case 'n': - parser->header_state = h_matching_connection; - break; - case 't': - parser->header_state = h_matching_content_length; - break; - default: - parser->header_state = h_general; - break; - } - break; - - /* connection */ - - case h_matching_connection: - parser->index++; - if (parser->index > sizeof(CONNECTION)-1 - || c != CONNECTION[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(CONNECTION)-2) { - parser->header_state = h_connection; - } - break; - - /* proxy-connection */ - - case h_matching_proxy_connection: - parser->index++; - if (parser->index > sizeof(PROXY_CONNECTION)-1 - || c != PROXY_CONNECTION[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(PROXY_CONNECTION)-2) { - parser->header_state = h_connection; - } - break; - - /* content-length */ - - case h_matching_content_length: - parser->index++; - if (parser->index > sizeof(CONTENT_LENGTH)-1 - || c != CONTENT_LENGTH[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(CONTENT_LENGTH)-2) { - parser->header_state = h_content_length; - } - break; - - /* transfer-encoding */ - - case h_matching_transfer_encoding: - parser->index++; - if (parser->index > sizeof(TRANSFER_ENCODING)-1 - || c != TRANSFER_ENCODING[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(TRANSFER_ENCODING)-2) { - parser->header_state = h_transfer_encoding; - parser->extra_flags |= F_TRANSFER_ENCODING >> 8; - } - break; - - /* upgrade */ - - case h_matching_upgrade: - parser->index++; - if (parser->index > sizeof(UPGRADE)-1 - || c != UPGRADE[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(UPGRADE)-2) { - parser->header_state = h_upgrade; - } - break; - - case h_connection: - case h_content_length: - case h_transfer_encoding: - case h_upgrade: - if (ch != ' ') parser->header_state = h_general; - break; - - default: - assert(0 && "Unknown header_state"); - break; - } - } - - if (p == data + len) { - --p; - COUNT_HEADER_SIZE(p - start); - break; - } - - COUNT_HEADER_SIZE(p - start); - - if (ch == ':') { - UPDATE_STATE(s_header_value_discard_ws); - CALLBACK_DATA(header_field); - break; - } - - SET_ERRNO(HPE_INVALID_HEADER_TOKEN); - goto error; - } - - case s_header_value_discard_ws: - if (ch == ' ' || ch == '\t') break; - - if (ch == CR) { - UPDATE_STATE(s_header_value_discard_ws_almost_done); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_value_discard_lws); - break; - } - - /* fall through */ - - case s_header_value_start: - { - MARK(header_value); - - UPDATE_STATE(s_header_value); - parser->index = 0; - - c = LOWER(ch); - - switch (parser->header_state) { - case h_upgrade: - parser->flags |= F_UPGRADE; - parser->header_state = h_general; - break; - - case h_transfer_encoding: - /* looking for 'Transfer-Encoding: chunked' */ - if ('c' == c) { - parser->header_state = h_matching_transfer_encoding_chunked; - } else { - parser->header_state = h_matching_transfer_encoding_token; - } - break; - - /* Multi-value `Transfer-Encoding` header */ - case h_matching_transfer_encoding_token_start: - break; - - case h_content_length: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - goto error; - } - - if (parser->flags & F_CONTENTLENGTH) { - SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH); - goto error; - } - - parser->flags |= F_CONTENTLENGTH; - 
parser->content_length = ch - '0'; - parser->header_state = h_content_length_num; - break; - - /* when obsolete line folding is encountered for content length - * continue to the s_header_value state */ - case h_content_length_ws: - break; - - case h_connection: - /* looking for 'Connection: keep-alive' */ - if (c == 'k') { - parser->header_state = h_matching_connection_keep_alive; - /* looking for 'Connection: close' */ - } else if (c == 'c') { - parser->header_state = h_matching_connection_close; - } else if (c == 'u') { - parser->header_state = h_matching_connection_upgrade; - } else { - parser->header_state = h_matching_connection_token; - } - break; - - /* Multi-value `Connection` header */ - case h_matching_connection_token_start: - break; - - default: - parser->header_state = h_general; - break; - } - break; - } - - case s_header_value: - { - const char* start = p; - enum header_states h_state = (enum header_states) parser->header_state; - for (; p != data + len; p++) { - ch = *p; - if (ch == CR) { - UPDATE_STATE(s_header_almost_done); - parser->header_state = h_state; - CALLBACK_DATA(header_value); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_almost_done); - COUNT_HEADER_SIZE(p - start); - parser->header_state = h_state; - CALLBACK_DATA_NOADVANCE(header_value); - REEXECUTE(); - } - - if (!lenient && !IS_HEADER_CHAR(ch)) { - SET_ERRNO(HPE_INVALID_HEADER_TOKEN); - goto error; - } - - c = LOWER(ch); - - switch (h_state) { - case h_general: - { - size_t left = data + len - p; - const char* pe = p + MIN(left, max_header_size); - - for (; p != pe; p++) { - ch = *p; - if (ch == CR || ch == LF) { - --p; - break; - } - if (!lenient && !IS_HEADER_CHAR(ch)) { - SET_ERRNO(HPE_INVALID_HEADER_TOKEN); - goto error; - } - } - if (p == data + len) - --p; - break; - } - - case h_connection: - case h_transfer_encoding: - assert(0 && "Shouldn't get here."); - break; - - case h_content_length: - if (ch == ' ') break; - h_state = h_content_length_num; - /* fall through */ - - case h_content_length_num: - { - uint64_t t; - - if (ch == ' ') { - h_state = h_content_length_ws; - break; - } - - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - parser->header_state = h_state; - goto error; - } - - t = parser->content_length; - t *= 10; - t += ch - '0'; - - /* Overflow? Test against a conservative limit for simplicity. */ - if (UNLIKELY((ULLONG_MAX - 10) / 10 < parser->content_length)) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - parser->header_state = h_state; - goto error; - } - - parser->content_length = t; - break; - } - - case h_content_length_ws: - if (ch == ' ') break; - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - parser->header_state = h_state; - goto error; - - /* Transfer-Encoding: chunked */ - case h_matching_transfer_encoding_token_start: - /* looking for 'Transfer-Encoding: chunked' */ - if ('c' == c) { - h_state = h_matching_transfer_encoding_chunked; - } else if (STRICT_TOKEN(c)) { - /* TODO(indutny): similar code below does this, but why? 
- * At the very least it seems to be inconsistent given that - * h_matching_transfer_encoding_token does not check for - * `STRICT_TOKEN` - */ - h_state = h_matching_transfer_encoding_token; - } else if (c == ' ' || c == '\t') { - /* Skip lws */ - } else { - h_state = h_general; - } - break; - - case h_matching_transfer_encoding_chunked: - parser->index++; - if (parser->index > sizeof(CHUNKED)-1 - || c != CHUNKED[parser->index]) { - h_state = h_matching_transfer_encoding_token; - } else if (parser->index == sizeof(CHUNKED)-2) { - h_state = h_transfer_encoding_chunked; - } - break; - - case h_matching_transfer_encoding_token: - if (ch == ',') { - h_state = h_matching_transfer_encoding_token_start; - parser->index = 0; - } - break; - - case h_matching_connection_token_start: - /* looking for 'Connection: keep-alive' */ - if (c == 'k') { - h_state = h_matching_connection_keep_alive; - /* looking for 'Connection: close' */ - } else if (c == 'c') { - h_state = h_matching_connection_close; - } else if (c == 'u') { - h_state = h_matching_connection_upgrade; - } else if (STRICT_TOKEN(c)) { - h_state = h_matching_connection_token; - } else if (c == ' ' || c == '\t') { - /* Skip lws */ - } else { - h_state = h_general; - } - break; - - /* looking for 'Connection: keep-alive' */ - case h_matching_connection_keep_alive: - parser->index++; - if (parser->index > sizeof(KEEP_ALIVE)-1 - || c != KEEP_ALIVE[parser->index]) { - h_state = h_matching_connection_token; - } else if (parser->index == sizeof(KEEP_ALIVE)-2) { - h_state = h_connection_keep_alive; - } - break; - - /* looking for 'Connection: close' */ - case h_matching_connection_close: - parser->index++; - if (parser->index > sizeof(CLOSE)-1 || c != CLOSE[parser->index]) { - h_state = h_matching_connection_token; - } else if (parser->index == sizeof(CLOSE)-2) { - h_state = h_connection_close; - } - break; - - /* looking for 'Connection: upgrade' */ - case h_matching_connection_upgrade: - parser->index++; - if (parser->index > sizeof(UPGRADE) - 1 || - c != UPGRADE[parser->index]) { - h_state = h_matching_connection_token; - } else if (parser->index == sizeof(UPGRADE)-2) { - h_state = h_connection_upgrade; - } - break; - - case h_matching_connection_token: - if (ch == ',') { - h_state = h_matching_connection_token_start; - parser->index = 0; - } - break; - - case h_transfer_encoding_chunked: - if (ch != ' ') h_state = h_matching_transfer_encoding_token; - break; - - case h_connection_keep_alive: - case h_connection_close: - case h_connection_upgrade: - if (ch == ',') { - if (h_state == h_connection_keep_alive) { - parser->flags |= F_CONNECTION_KEEP_ALIVE; - } else if (h_state == h_connection_close) { - parser->flags |= F_CONNECTION_CLOSE; - } else if (h_state == h_connection_upgrade) { - parser->flags |= F_CONNECTION_UPGRADE; - } - h_state = h_matching_connection_token_start; - parser->index = 0; - } else if (ch != ' ') { - h_state = h_matching_connection_token; - } - break; - - default: - UPDATE_STATE(s_header_value); - h_state = h_general; - break; - } - } - parser->header_state = h_state; - - if (p == data + len) - --p; - - COUNT_HEADER_SIZE(p - start); - break; - } - - case s_header_almost_done: - { - if (UNLIKELY(ch != LF)) { - SET_ERRNO(HPE_LF_EXPECTED); - goto error; - } - - UPDATE_STATE(s_header_value_lws); - break; - } - - case s_header_value_lws: - { - if (ch == ' ' || ch == '\t') { - if (parser->header_state == h_content_length_num) { - /* treat obsolete line folding as space */ - parser->header_state = h_content_length_ws; - } - 
UPDATE_STATE(s_header_value_start); - REEXECUTE(); - } - - /* finished the header */ - switch (parser->header_state) { - case h_connection_keep_alive: - parser->flags |= F_CONNECTION_KEEP_ALIVE; - break; - case h_connection_close: - parser->flags |= F_CONNECTION_CLOSE; - break; - case h_transfer_encoding_chunked: - parser->flags |= F_CHUNKED; - break; - case h_connection_upgrade: - parser->flags |= F_CONNECTION_UPGRADE; - break; - default: - break; - } - - UPDATE_STATE(s_header_field_start); - REEXECUTE(); - } - - case s_header_value_discard_ws_almost_done: - { - STRICT_CHECK(ch != LF); - UPDATE_STATE(s_header_value_discard_lws); - break; - } - - case s_header_value_discard_lws: - { - if (ch == ' ' || ch == '\t') { - UPDATE_STATE(s_header_value_discard_ws); - break; - } else { - switch (parser->header_state) { - case h_connection_keep_alive: - parser->flags |= F_CONNECTION_KEEP_ALIVE; - break; - case h_connection_close: - parser->flags |= F_CONNECTION_CLOSE; - break; - case h_connection_upgrade: - parser->flags |= F_CONNECTION_UPGRADE; - break; - case h_transfer_encoding_chunked: - parser->flags |= F_CHUNKED; - break; - case h_content_length: - /* do not allow empty content length */ - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - goto error; - break; - default: - break; - } - - /* header value was empty */ - MARK(header_value); - UPDATE_STATE(s_header_field_start); - CALLBACK_DATA_NOADVANCE(header_value); - REEXECUTE(); - } - } - - case s_headers_almost_done: - { - STRICT_CHECK(ch != LF); - - if (parser->flags & F_TRAILING) { - /* End of a chunked request */ - UPDATE_STATE(s_message_done); - CALLBACK_NOTIFY_NOADVANCE(chunk_complete); - REEXECUTE(); - } - - /* Cannot us transfer-encoding and a content-length header together - per the HTTP specification. (RFC 7230 Section 3.3.3) */ - if ((parser->extra_flags & (F_TRANSFER_ENCODING >> 8)) && - (parser->flags & F_CONTENTLENGTH)) { - /* Allow it for lenient parsing as long as `Transfer-Encoding` is - * not `chunked` - */ - if (!lenient || (parser->flags & F_CHUNKED)) { - SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH); - goto error; - } - } - - UPDATE_STATE(s_headers_done); - - /* Set this here so that on_headers_complete() callbacks can see it */ - if ((parser->flags & F_UPGRADE) && - (parser->flags & F_CONNECTION_UPGRADE)) { - /* For responses, "Upgrade: foo" and "Connection: upgrade" are - * mandatory only when it is a 101 Switching Protocols response, - * otherwise it is purely informational, to announce support. - */ - parser->upgrade = - (parser->type == HTTP_REQUEST || parser->status_code == 101); - } else { - parser->upgrade = (parser->method == HTTP_CONNECT); - } - - /* Here we call the headers_complete callback. This is somewhat - * different than other callbacks because if the user returns 1, we - * will interpret that as saying that this message has no body. This - * is needed for the annoying case of recieving a response to a HEAD - * request. - * - * We'd like to use CALLBACK_NOTIFY_NOADVANCE() here but we cannot, so - * we have to simulate it by handling a change in errno below. 
- */ - if (settings->on_headers_complete) { - switch (settings->on_headers_complete(parser)) { - case 0: - break; - - case 2: - parser->upgrade = 1; - - /* fall through */ - case 1: - parser->flags |= F_SKIPBODY; - break; - - default: - SET_ERRNO(HPE_CB_headers_complete); - RETURN(p - data); /* Error */ - } - } - - if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { - RETURN(p - data); - } - - REEXECUTE(); - } - - case s_headers_done: - { - int hasBody; - STRICT_CHECK(ch != LF); - - parser->nread = 0; - nread = 0; - - hasBody = parser->flags & F_CHUNKED || - (parser->content_length > 0 && parser->content_length != ULLONG_MAX); - if (parser->upgrade && (parser->method == HTTP_CONNECT || - (parser->flags & F_SKIPBODY) || !hasBody)) { - /* Exit, the rest of the message is in a different protocol. */ - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - RETURN((p - data) + 1); - } - - if (parser->flags & F_SKIPBODY) { - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - } else if (parser->flags & F_CHUNKED) { - /* chunked encoding - ignore Content-Length header, - * prepare for a chunk */ - UPDATE_STATE(s_chunk_size_start); - } else if (parser->extra_flags & (F_TRANSFER_ENCODING >> 8)) { - if (parser->type == HTTP_REQUEST && !lenient) { - /* RFC 7230 3.3.3 */ - - /* If a Transfer-Encoding header field - * is present in a request and the chunked transfer coding is not - * the final encoding, the message body length cannot be determined - * reliably; the server MUST respond with the 400 (Bad Request) - * status code and then close the connection. - */ - SET_ERRNO(HPE_INVALID_TRANSFER_ENCODING); - RETURN(p - data); /* Error */ - } else { - /* RFC 7230 3.3.3 */ - - /* If a Transfer-Encoding header field is present in a response and - * the chunked transfer coding is not the final encoding, the - * message body length is determined by reading the connection until - * it is closed by the server. - */ - UPDATE_STATE(s_body_identity_eof); - } - } else { - if (parser->content_length == 0) { - /* Content-Length header given but zero: Content-Length: 0\r\n */ - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - } else if (parser->content_length != ULLONG_MAX) { - /* Content-Length header given and non-zero */ - UPDATE_STATE(s_body_identity); - } else { - if (!http_message_needs_eof(parser)) { - /* Assume content-length 0 - read the next */ - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - } else { - /* Read body until EOF */ - UPDATE_STATE(s_body_identity_eof); - } - } - } - - break; - } - - case s_body_identity: - { - uint64_t to_read = MIN(parser->content_length, - (uint64_t) ((data + len) - p)); - - assert(parser->content_length != 0 - && parser->content_length != ULLONG_MAX); - - /* The difference between advancing content_length and p is because - * the latter will automaticaly advance on the next loop iteration. - * Further, if content_length ends up at 0, we want to see the last - * byte again for our message complete callback. - */ - MARK(body); - parser->content_length -= to_read; - p += to_read - 1; - - if (parser->content_length == 0) { - UPDATE_STATE(s_message_done); - - /* Mimic CALLBACK_DATA_NOADVANCE() but with one extra byte. - * - * The alternative to doing this is to wait for the next byte to - * trigger the data callback, just as in every other case. The - * problem with this is that this makes it difficult for the test - * harness to distinguish between complete-on-EOF and - * complete-on-length. 
It's not clear that this distinction is - * important for applications, but let's keep it for now. - */ - CALLBACK_DATA_(body, p - body_mark + 1, p - data); - REEXECUTE(); - } - - break; - } - - /* read until EOF */ - case s_body_identity_eof: - MARK(body); - p = data + len - 1; - - break; - - case s_message_done: - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - if (parser->upgrade) { - /* Exit, the rest of the message is in a different protocol. */ - RETURN((p - data) + 1); - } - break; - - case s_chunk_size_start: - { - assert(nread == 1); - assert(parser->flags & F_CHUNKED); - - unhex_val = unhex[(unsigned char)ch]; - if (UNLIKELY(unhex_val == -1)) { - SET_ERRNO(HPE_INVALID_CHUNK_SIZE); - goto error; - } - - parser->content_length = unhex_val; - UPDATE_STATE(s_chunk_size); - break; - } - - case s_chunk_size: - { - uint64_t t; - - assert(parser->flags & F_CHUNKED); - - if (ch == CR) { - UPDATE_STATE(s_chunk_size_almost_done); - break; - } - - unhex_val = unhex[(unsigned char)ch]; - - if (unhex_val == -1) { - if (ch == ';' || ch == ' ') { - UPDATE_STATE(s_chunk_parameters); - break; - } - - SET_ERRNO(HPE_INVALID_CHUNK_SIZE); - goto error; - } - - t = parser->content_length; - t *= 16; - t += unhex_val; - - /* Overflow? Test against a conservative limit for simplicity. */ - if (UNLIKELY((ULLONG_MAX - 16) / 16 < parser->content_length)) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - goto error; - } - - parser->content_length = t; - break; - } - - case s_chunk_parameters: - { - assert(parser->flags & F_CHUNKED); - /* just ignore this shit. TODO check for overflow */ - if (ch == CR) { - UPDATE_STATE(s_chunk_size_almost_done); - break; - } - break; - } - - case s_chunk_size_almost_done: - { - assert(parser->flags & F_CHUNKED); - STRICT_CHECK(ch != LF); - - parser->nread = 0; - nread = 0; - - if (parser->content_length == 0) { - parser->flags |= F_TRAILING; - UPDATE_STATE(s_header_field_start); - } else { - UPDATE_STATE(s_chunk_data); - } - CALLBACK_NOTIFY(chunk_header); - break; - } - - case s_chunk_data: - { - uint64_t to_read = MIN(parser->content_length, - (uint64_t) ((data + len) - p)); - - assert(parser->flags & F_CHUNKED); - assert(parser->content_length != 0 - && parser->content_length != ULLONG_MAX); - - /* See the explanation in s_body_identity for why the content - * length and data pointers are managed this way. - */ - MARK(body); - parser->content_length -= to_read; - p += to_read - 1; - - if (parser->content_length == 0) { - UPDATE_STATE(s_chunk_data_almost_done); - } - - break; - } - - case s_chunk_data_almost_done: - assert(parser->flags & F_CHUNKED); - assert(parser->content_length == 0); - STRICT_CHECK(ch != CR); - UPDATE_STATE(s_chunk_data_done); - CALLBACK_DATA(body); - break; - - case s_chunk_data_done: - assert(parser->flags & F_CHUNKED); - STRICT_CHECK(ch != LF); - parser->nread = 0; - nread = 0; - UPDATE_STATE(s_chunk_size_start); - CALLBACK_NOTIFY(chunk_complete); - break; - - default: - assert(0 && "unhandled state"); - SET_ERRNO(HPE_INVALID_INTERNAL_STATE); - goto error; - } - } - - /* Run callbacks for any marks that we have leftover after we ran out of - * bytes. There should be at most one of these set, so it's OK to invoke - * them in series (unset marks will not result in callbacks). - * - * We use the NOADVANCE() variety of callbacks here because 'p' has already - * overflowed 'data' and this allows us to correct for the off-by-one that - * we'd otherwise have (since CALLBACK_DATA() is meant to be run with a 'p' - * value that's in-bounds). 
- */ - - assert(((header_field_mark ? 1 : 0) + - (header_value_mark ? 1 : 0) + - (url_mark ? 1 : 0) + - (body_mark ? 1 : 0) + - (status_mark ? 1 : 0)) <= 1); - - CALLBACK_DATA_NOADVANCE(header_field); - CALLBACK_DATA_NOADVANCE(header_value); - CALLBACK_DATA_NOADVANCE(url); - CALLBACK_DATA_NOADVANCE(body); - CALLBACK_DATA_NOADVANCE(status); - - RETURN(len); - -error: - if (HTTP_PARSER_ERRNO(parser) == HPE_OK) { - SET_ERRNO(HPE_UNKNOWN); - } - - RETURN(p - data); -} - - -/* Does the parser need to see an EOF to find the end of the message? */ -int -http_message_needs_eof (const http_parser *parser) -{ - if (parser->type == HTTP_REQUEST) { - return 0; - } - - /* See RFC 2616 section 4.4 */ - if (parser->status_code / 100 == 1 || /* 1xx e.g. Continue */ - parser->status_code == 204 || /* No Content */ - parser->status_code == 304 || /* Not Modified */ - parser->flags & F_SKIPBODY) { /* response to a HEAD request */ - return 0; - } - - /* RFC 7230 3.3.3, see `s_headers_almost_done` */ - if ((parser->extra_flags & (F_TRANSFER_ENCODING >> 8)) && - (parser->flags & F_CHUNKED) == 0) { - return 1; - } - - if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) { - return 0; - } - - return 1; -} - - -int -http_should_keep_alive (const http_parser *parser) -{ - if (parser->http_major > 0 && parser->http_minor > 0) { - /* HTTP/1.1 */ - if (parser->flags & F_CONNECTION_CLOSE) { - return 0; - } - } else { - /* HTTP/1.0 or earlier */ - if (!(parser->flags & F_CONNECTION_KEEP_ALIVE)) { - return 0; - } - } - - return !http_message_needs_eof(parser); -} - - -const char * -http_method_str (enum http_method m) -{ - return ELEM_AT(method_strings, m, ""); -} - -const char * -http_status_str (enum http_status s) -{ - switch (s) { -#define XX(num, name, string) case HTTP_STATUS_##name: return #string; - HTTP_STATUS_MAP(XX) -#undef XX - default: return ""; - } -} - -void -http_parser_init (http_parser *parser, enum http_parser_type t) -{ - void *data = parser->data; /* preserve application data */ - memset(parser, 0, sizeof(*parser)); - parser->data = data; - parser->type = t; - parser->state = (t == HTTP_REQUEST ? s_start_req : (t == HTTP_RESPONSE ? 
s_start_res : s_start_req_or_res)); - parser->http_errno = HPE_OK; -} - -void -http_parser_settings_init(http_parser_settings *settings) -{ - memset(settings, 0, sizeof(*settings)); -} - -const char * -http_errno_name(enum http_errno err) { - assert(((size_t) err) < ARRAY_SIZE(http_strerror_tab)); - return http_strerror_tab[err].name; -} - -const char * -http_errno_description(enum http_errno err) { - assert(((size_t) err) < ARRAY_SIZE(http_strerror_tab)); - return http_strerror_tab[err].description; -} - -static enum http_host_state -http_parse_host_char(enum http_host_state s, const char ch) { - switch(s) { - case s_http_userinfo: - case s_http_userinfo_start: - if (ch == '@') { - return s_http_host_start; - } - - if (IS_USERINFO_CHAR(ch)) { - return s_http_userinfo; - } - break; - - case s_http_host_start: - if (ch == '[') { - return s_http_host_v6_start; - } - - if (IS_HOST_CHAR(ch)) { - return s_http_host; - } - - break; - - case s_http_host: - if (IS_HOST_CHAR(ch)) { - return s_http_host; - } - - /* fall through */ - case s_http_host_v6_end: - if (ch == ':') { - return s_http_host_port_start; - } - - break; - - case s_http_host_v6: - if (ch == ']') { - return s_http_host_v6_end; - } - - /* fall through */ - case s_http_host_v6_start: - if (IS_HEX(ch) || ch == ':' || ch == '.') { - return s_http_host_v6; - } - - if (s == s_http_host_v6 && ch == '%') { - return s_http_host_v6_zone_start; - } - break; - - case s_http_host_v6_zone: - if (ch == ']') { - return s_http_host_v6_end; - } - - /* fall through */ - case s_http_host_v6_zone_start: - /* RFC 6874 Zone ID consists of 1*( unreserved / pct-encoded) */ - if (IS_ALPHANUM(ch) || ch == '%' || ch == '.' || ch == '-' || ch == '_' || - ch == '~') { - return s_http_host_v6_zone; - } - break; - - case s_http_host_port: - case s_http_host_port_start: - if (IS_NUM(ch)) { - return s_http_host_port; - } - - break; - - default: - break; - } - return s_http_host_dead; -} - -static int -http_parse_host(const char * buf, struct http_parser_url *u, int found_at) { - enum http_host_state s; - - const char *p; - size_t buflen = u->field_data[UF_HOST].off + u->field_data[UF_HOST].len; - - assert(u->field_set & (1 << UF_HOST)); - - u->field_data[UF_HOST].len = 0; - - s = found_at ? 
s_http_userinfo_start : s_http_host_start; - - for (p = buf + u->field_data[UF_HOST].off; p < buf + buflen; p++) { - enum http_host_state new_s = http_parse_host_char(s, *p); - - if (new_s == s_http_host_dead) { - return 1; - } - - switch(new_s) { - case s_http_host: - if (s != s_http_host) { - u->field_data[UF_HOST].off = (uint16_t)(p - buf); - } - u->field_data[UF_HOST].len++; - break; - - case s_http_host_v6: - if (s != s_http_host_v6) { - u->field_data[UF_HOST].off = (uint16_t)(p - buf); - } - u->field_data[UF_HOST].len++; - break; - - case s_http_host_v6_zone_start: - case s_http_host_v6_zone: - u->field_data[UF_HOST].len++; - break; - - case s_http_host_port: - if (s != s_http_host_port) { - u->field_data[UF_PORT].off = (uint16_t)(p - buf); - u->field_data[UF_PORT].len = 0; - u->field_set |= (1 << UF_PORT); - } - u->field_data[UF_PORT].len++; - break; - - case s_http_userinfo: - if (s != s_http_userinfo) { - u->field_data[UF_USERINFO].off = (uint16_t)(p - buf); - u->field_data[UF_USERINFO].len = 0; - u->field_set |= (1 << UF_USERINFO); - } - u->field_data[UF_USERINFO].len++; - break; - - default: - break; - } - s = new_s; - } - - /* Make sure we don't end somewhere unexpected */ - switch (s) { - case s_http_host_start: - case s_http_host_v6_start: - case s_http_host_v6: - case s_http_host_v6_zone_start: - case s_http_host_v6_zone: - case s_http_host_port_start: - case s_http_userinfo: - case s_http_userinfo_start: - return 1; - default: - break; - } - - return 0; -} - -void -http_parser_url_init(struct http_parser_url *u) { - memset(u, 0, sizeof(*u)); -} - -int -http_parser_parse_url(const char *buf, size_t buflen, int is_connect, - struct http_parser_url *u) -{ - enum state s; - const char *p; - enum http_parser_url_fields uf, old_uf; - int found_at = 0; - - if (buflen == 0) { - return 1; - } - - u->port = u->field_set = 0; - s = is_connect ? 
s_req_server_start : s_req_spaces_before_url; - old_uf = UF_MAX; - - for (p = buf; p < buf + buflen; p++) { - s = parse_url_char(s, *p); - - /* Figure out the next field that we're operating on */ - switch (s) { - case s_dead: - return 1; - - /* Skip delimeters */ - case s_req_schema_slash: - case s_req_schema_slash_slash: - case s_req_server_start: - case s_req_query_string_start: - case s_req_fragment_start: - continue; - - case s_req_schema: - uf = UF_SCHEMA; - break; - - case s_req_server_with_at: - found_at = 1; - - /* fall through */ - case s_req_server: - uf = UF_HOST; - break; - - case s_req_path: - uf = UF_PATH; - break; - - case s_req_query_string: - uf = UF_QUERY; - break; - - case s_req_fragment: - uf = UF_FRAGMENT; - break; - - default: - assert(!"Unexpected state"); - return 1; - } - - /* Nothing's changed; soldier on */ - if (uf == old_uf) { - u->field_data[uf].len++; - continue; - } - - u->field_data[uf].off = (uint16_t)(p - buf); - u->field_data[uf].len = 1; - - u->field_set |= (1 << uf); - old_uf = uf; - } - - /* host must be present if there is a schema */ - /* parsing http:///toto will fail */ - if ((u->field_set & (1 << UF_SCHEMA)) && - (u->field_set & (1 << UF_HOST)) == 0) { - return 1; - } - - if (u->field_set & (1 << UF_HOST)) { - if (http_parse_host(buf, u, found_at) != 0) { - return 1; - } - } - - /* CONNECT requests can only contain "hostname:port" */ - if (is_connect && u->field_set != ((1 << UF_HOST)|(1 << UF_PORT))) { - return 1; - } - - if (u->field_set & (1 << UF_PORT)) { - uint16_t off; - uint16_t len; - const char* p; - const char* end; - unsigned long v; - - off = u->field_data[UF_PORT].off; - len = u->field_data[UF_PORT].len; - end = buf + off + len; - - /* NOTE: The characters are already validated and are in the [0-9] range */ - assert(off + len <= buflen && "Port number overflow"); - v = 0; - for (p = buf + off; p < end; p++) { - v *= 10; - v += *p - '0'; - - /* Ports have a max value of 2^16 */ - if (v > 0xffff) { - return 1; - } - } - - u->port = (uint16_t) v; - } - - return 0; -} - -void -http_parser_pause(http_parser *parser, int paused) { - /* Users should only be pausing/unpausing a parser that is not in an error - * state. In non-debug builds, there's not much that we can do about this - * other than ignore it. - */ - if (HTTP_PARSER_ERRNO(parser) == HPE_OK || - HTTP_PARSER_ERRNO(parser) == HPE_PAUSED) { - uint32_t nread = parser->nread; /* used by the SET_ERRNO macro */ - SET_ERRNO((paused) ? HPE_PAUSED : HPE_OK); - } else { - assert(0 && "Attempting to pause parser in error state"); - } -} - -int -http_body_is_final(const struct http_parser *parser) { - return parser->state == s_message_done; -} - -unsigned long -http_parser_version(void) { - return HTTP_PARSER_VERSION_MAJOR * 0x10000 | - HTTP_PARSER_VERSION_MINOR * 0x00100 | - HTTP_PARSER_VERSION_PATCH * 0x00001; -} - -void -http_parser_set_max_header_size(uint32_t size) { - max_header_size = size; -} diff --git a/third_party/python/aiohttp/vendor/http-parser/http_parser.gyp b/third_party/python/aiohttp/vendor/http-parser/http_parser.gyp deleted file mode 100644 index ef34ecaeaeab..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/http_parser.gyp +++ /dev/null @@ -1,111 +0,0 @@ -# This file is used with the GYP meta build system. 
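
A minimal usage sketch (not part of the vendored sources) for the last two entry points defined in http_parser.c above: http_parser_version(), whose result packs major/minor/patch as MAJOR * 0x10000 | MINOR * 0x100 | PATCH, and http_parser_set_max_header_size(), which adjusts the global cap that otherwise defaults to HTTP_MAX_HEADER_SIZE (80 KiB) and trips HPE_HEADER_OVERFLOW when exceeded. The helper name report_version is illustrative only.

#include <stdio.h>
#include "http_parser.h"

static void report_version(void) {
  unsigned long v = http_parser_version();
  unsigned major = (v >> 16) & 255;  /* packed as MAJOR * 0x10000 | ... */
  unsigned minor = (v >> 8) & 255;
  unsigned patch = v & 255;
  printf("http_parser %u.%u.%u\n", major, minor, patch);
}

int main(void) {
  report_version();
  /* Tighten the header-size cap below the 80 KiB default; messages whose
   * headers exceed this now fail with HPE_HEADER_OVERFLOW. */
  http_parser_set_max_header_size(16 * 1024);
  return 0;
}
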
-# http://code.google.com/p/gyp/ -# To build try this: -# svn co http://gyp.googlecode.com/svn/trunk gyp -# ./gyp/gyp -f make --depth=`pwd` http_parser.gyp -# ./out/Debug/test -{ - 'target_defaults': { - 'default_configuration': 'Debug', - 'configurations': { - # TODO: hoist these out and put them somewhere common, because - # RuntimeLibrary MUST MATCH across the entire project - 'Debug': { - 'defines': [ 'DEBUG', '_DEBUG' ], - 'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 1, # static debug - }, - }, - }, - 'Release': { - 'defines': [ 'NDEBUG' ], - 'cflags': [ '-Wall', '-Wextra', '-O3' ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 0, # static release - }, - }, - } - }, - 'msvs_settings': { - 'VCCLCompilerTool': { - }, - 'VCLibrarianTool': { - }, - 'VCLinkerTool': { - 'GenerateDebugInformation': 'true', - }, - }, - 'conditions': [ - ['OS == "win"', { - 'defines': [ - 'WIN32' - ], - }] - ], - }, - - 'targets': [ - { - 'target_name': 'http_parser', - 'type': 'static_library', - 'include_dirs': [ '.' ], - 'direct_dependent_settings': { - 'defines': [ 'HTTP_PARSER_STRICT=0' ], - 'include_dirs': [ '.' ], - }, - 'defines': [ 'HTTP_PARSER_STRICT=0' ], - 'sources': [ './http_parser.c', ], - 'conditions': [ - ['OS=="win"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - # Compile as C++. http_parser.c is actually C99, but C++ is - # close enough in this case. - 'CompileAs': 2, - }, - }, - }] - ], - }, - - { - 'target_name': 'http_parser_strict', - 'type': 'static_library', - 'include_dirs': [ '.' ], - 'direct_dependent_settings': { - 'defines': [ 'HTTP_PARSER_STRICT=1' ], - 'include_dirs': [ '.' ], - }, - 'defines': [ 'HTTP_PARSER_STRICT=1' ], - 'sources': [ './http_parser.c', ], - 'conditions': [ - ['OS=="win"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - # Compile as C++. http_parser.c is actually C99, but C++ is - # close enough in this case. - 'CompileAs': 2, - }, - }, - }] - ], - }, - - { - 'target_name': 'test-nonstrict', - 'type': 'executable', - 'dependencies': [ 'http_parser' ], - 'sources': [ 'test.c' ] - }, - - { - 'target_name': 'test-strict', - 'type': 'executable', - 'dependencies': [ 'http_parser_strict' ], - 'sources': [ 'test.c' ] - } - ] -} diff --git a/third_party/python/aiohttp/vendor/http-parser/http_parser.h b/third_party/python/aiohttp/vendor/http-parser/http_parser.h deleted file mode 100644 index df8825260dfe..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/http_parser.h +++ /dev/null @@ -1,443 +0,0 @@ -/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ -#ifndef http_parser_h -#define http_parser_h -#ifdef __cplusplus -extern "C" { -#endif - -/* Also update SONAME in the Makefile whenever you change these. */ -#define HTTP_PARSER_VERSION_MAJOR 2 -#define HTTP_PARSER_VERSION_MINOR 9 -#define HTTP_PARSER_VERSION_PATCH 4 - -#include -#if defined(_WIN32) && !defined(__MINGW32__) && \ - (!defined(_MSC_VER) || _MSC_VER<1600) && !defined(__WINE__) -#include -typedef __int8 int8_t; -typedef unsigned __int8 uint8_t; -typedef __int16 int16_t; -typedef unsigned __int16 uint16_t; -typedef __int32 int32_t; -typedef unsigned __int32 uint32_t; -typedef __int64 int64_t; -typedef unsigned __int64 uint64_t; -#else -#include -#endif - -/* Compile with -DHTTP_PARSER_STRICT=0 to make less checks, but run - * faster - */ -#ifndef HTTP_PARSER_STRICT -# define HTTP_PARSER_STRICT 1 -#endif - -/* Maximium header size allowed. If the macro is not defined - * before including this header then the default is used. To - * change the maximum header size, define the macro in the build - * environment (e.g. -DHTTP_MAX_HEADER_SIZE=). To remove - * the effective limit on the size of the header, define the macro - * to a very large number (e.g. -DHTTP_MAX_HEADER_SIZE=0x7fffffff) - */ -#ifndef HTTP_MAX_HEADER_SIZE -# define HTTP_MAX_HEADER_SIZE (80*1024) -#endif - -typedef struct http_parser http_parser; -typedef struct http_parser_settings http_parser_settings; - - -/* Callbacks should return non-zero to indicate an error. The parser will - * then halt execution. - * - * The one exception is on_headers_complete. In a HTTP_RESPONSE parser - * returning '1' from on_headers_complete will tell the parser that it - * should not expect a body. This is used when receiving a response to a - * HEAD request which may contain 'Content-Length' or 'Transfer-Encoding: - * chunked' headers that indicate the presence of a body. - * - * Returning `2` from on_headers_complete will tell parser that it should not - * expect neither a body nor any futher responses on this connection. This is - * useful for handling responses to a CONNECT request which may not contain - * `Upgrade` or `Connection: upgrade` headers. - * - * http_data_cb does not return data chunks. It will be called arbitrarily - * many times for each string. E.G. you might get 10 callbacks for "on_url" - * each providing just a few characters more data. 
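 *
 * A minimal sketch of how the callbacks described above get wired up, in the
 * spirit of the version-usage example further down in this header. The
 * `my_on_url` / `parse_request` names and the `<stdio.h>` include behind
 * fwrite are assumptions for illustration only; every http_parser call used
 * is declared later in this file.
 *
 *   static int my_on_url(http_parser *p, const char *at, size_t length) {
 *     (void)p;                        // parser handle not needed here
 *     fwrite(at, 1, length, stdout);  // URL bytes may arrive in several chunks
 *     return 0;                       // a non-zero return aborts parsing
 *   }
 *
 *   static int parse_request(const char *buf, size_t len) {
 *     http_parser parser;
 *     http_parser_settings settings;
 *     http_parser_settings_init(&settings);    // zero every callback slot
 *     settings.on_url = my_on_url;             // register the data callback
 *     http_parser_init(&parser, HTTP_REQUEST);
 *     size_t nparsed = http_parser_execute(&parser, &settings, buf, len);
 *     // On a clean parse nparsed == len and the errno stays HPE_OK;
 *     // nparsed may stop short legitimately only when parser.upgrade is set.
 *     return (nparsed == len && HTTP_PARSER_ERRNO(&parser) == HPE_OK) ? 0 : -1;
 *   }
 *
 * (Because http_parser_execute takes the settings as a const pointer, a single
 * settings instance can be shared by any number of parsers.)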
- */ -typedef int (*http_data_cb) (http_parser*, const char *at, size_t length); -typedef int (*http_cb) (http_parser*); - - -/* Status Codes */ -#define HTTP_STATUS_MAP(XX) \ - XX(100, CONTINUE, Continue) \ - XX(101, SWITCHING_PROTOCOLS, Switching Protocols) \ - XX(102, PROCESSING, Processing) \ - XX(200, OK, OK) \ - XX(201, CREATED, Created) \ - XX(202, ACCEPTED, Accepted) \ - XX(203, NON_AUTHORITATIVE_INFORMATION, Non-Authoritative Information) \ - XX(204, NO_CONTENT, No Content) \ - XX(205, RESET_CONTENT, Reset Content) \ - XX(206, PARTIAL_CONTENT, Partial Content) \ - XX(207, MULTI_STATUS, Multi-Status) \ - XX(208, ALREADY_REPORTED, Already Reported) \ - XX(226, IM_USED, IM Used) \ - XX(300, MULTIPLE_CHOICES, Multiple Choices) \ - XX(301, MOVED_PERMANENTLY, Moved Permanently) \ - XX(302, FOUND, Found) \ - XX(303, SEE_OTHER, See Other) \ - XX(304, NOT_MODIFIED, Not Modified) \ - XX(305, USE_PROXY, Use Proxy) \ - XX(307, TEMPORARY_REDIRECT, Temporary Redirect) \ - XX(308, PERMANENT_REDIRECT, Permanent Redirect) \ - XX(400, BAD_REQUEST, Bad Request) \ - XX(401, UNAUTHORIZED, Unauthorized) \ - XX(402, PAYMENT_REQUIRED, Payment Required) \ - XX(403, FORBIDDEN, Forbidden) \ - XX(404, NOT_FOUND, Not Found) \ - XX(405, METHOD_NOT_ALLOWED, Method Not Allowed) \ - XX(406, NOT_ACCEPTABLE, Not Acceptable) \ - XX(407, PROXY_AUTHENTICATION_REQUIRED, Proxy Authentication Required) \ - XX(408, REQUEST_TIMEOUT, Request Timeout) \ - XX(409, CONFLICT, Conflict) \ - XX(410, GONE, Gone) \ - XX(411, LENGTH_REQUIRED, Length Required) \ - XX(412, PRECONDITION_FAILED, Precondition Failed) \ - XX(413, PAYLOAD_TOO_LARGE, Payload Too Large) \ - XX(414, URI_TOO_LONG, URI Too Long) \ - XX(415, UNSUPPORTED_MEDIA_TYPE, Unsupported Media Type) \ - XX(416, RANGE_NOT_SATISFIABLE, Range Not Satisfiable) \ - XX(417, EXPECTATION_FAILED, Expectation Failed) \ - XX(421, MISDIRECTED_REQUEST, Misdirected Request) \ - XX(422, UNPROCESSABLE_ENTITY, Unprocessable Entity) \ - XX(423, LOCKED, Locked) \ - XX(424, FAILED_DEPENDENCY, Failed Dependency) \ - XX(426, UPGRADE_REQUIRED, Upgrade Required) \ - XX(428, PRECONDITION_REQUIRED, Precondition Required) \ - XX(429, TOO_MANY_REQUESTS, Too Many Requests) \ - XX(431, REQUEST_HEADER_FIELDS_TOO_LARGE, Request Header Fields Too Large) \ - XX(451, UNAVAILABLE_FOR_LEGAL_REASONS, Unavailable For Legal Reasons) \ - XX(500, INTERNAL_SERVER_ERROR, Internal Server Error) \ - XX(501, NOT_IMPLEMENTED, Not Implemented) \ - XX(502, BAD_GATEWAY, Bad Gateway) \ - XX(503, SERVICE_UNAVAILABLE, Service Unavailable) \ - XX(504, GATEWAY_TIMEOUT, Gateway Timeout) \ - XX(505, HTTP_VERSION_NOT_SUPPORTED, HTTP Version Not Supported) \ - XX(506, VARIANT_ALSO_NEGOTIATES, Variant Also Negotiates) \ - XX(507, INSUFFICIENT_STORAGE, Insufficient Storage) \ - XX(508, LOOP_DETECTED, Loop Detected) \ - XX(510, NOT_EXTENDED, Not Extended) \ - XX(511, NETWORK_AUTHENTICATION_REQUIRED, Network Authentication Required) \ - -enum http_status - { -#define XX(num, name, string) HTTP_STATUS_##name = num, - HTTP_STATUS_MAP(XX) -#undef XX - }; - - -/* Request Methods */ -#define HTTP_METHOD_MAP(XX) \ - XX(0, DELETE, DELETE) \ - XX(1, GET, GET) \ - XX(2, HEAD, HEAD) \ - XX(3, POST, POST) \ - XX(4, PUT, PUT) \ - /* pathological */ \ - XX(5, CONNECT, CONNECT) \ - XX(6, OPTIONS, OPTIONS) \ - XX(7, TRACE, TRACE) \ - /* WebDAV */ \ - XX(8, COPY, COPY) \ - XX(9, LOCK, LOCK) \ - XX(10, MKCOL, MKCOL) \ - XX(11, MOVE, MOVE) \ - XX(12, PROPFIND, PROPFIND) \ - XX(13, PROPPATCH, PROPPATCH) \ - XX(14, SEARCH, SEARCH) \ - XX(15, UNLOCK, 
UNLOCK) \ - XX(16, BIND, BIND) \ - XX(17, REBIND, REBIND) \ - XX(18, UNBIND, UNBIND) \ - XX(19, ACL, ACL) \ - /* subversion */ \ - XX(20, REPORT, REPORT) \ - XX(21, MKACTIVITY, MKACTIVITY) \ - XX(22, CHECKOUT, CHECKOUT) \ - XX(23, MERGE, MERGE) \ - /* upnp */ \ - XX(24, MSEARCH, M-SEARCH) \ - XX(25, NOTIFY, NOTIFY) \ - XX(26, SUBSCRIBE, SUBSCRIBE) \ - XX(27, UNSUBSCRIBE, UNSUBSCRIBE) \ - /* RFC-5789 */ \ - XX(28, PATCH, PATCH) \ - XX(29, PURGE, PURGE) \ - /* CalDAV */ \ - XX(30, MKCALENDAR, MKCALENDAR) \ - /* RFC-2068, section 19.6.1.2 */ \ - XX(31, LINK, LINK) \ - XX(32, UNLINK, UNLINK) \ - /* icecast */ \ - XX(33, SOURCE, SOURCE) \ - -enum http_method - { -#define XX(num, name, string) HTTP_##name = num, - HTTP_METHOD_MAP(XX) -#undef XX - }; - - -enum http_parser_type { HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH }; - - -/* Flag values for http_parser.flags field */ -enum flags - { F_CHUNKED = 1 << 0 - , F_CONNECTION_KEEP_ALIVE = 1 << 1 - , F_CONNECTION_CLOSE = 1 << 2 - , F_CONNECTION_UPGRADE = 1 << 3 - , F_TRAILING = 1 << 4 - , F_UPGRADE = 1 << 5 - , F_SKIPBODY = 1 << 6 - , F_CONTENTLENGTH = 1 << 7 - , F_TRANSFER_ENCODING = 1 << 8 /* Never set in http_parser.flags */ - }; - - -/* Map for errno-related constants - * - * The provided argument should be a macro that takes 2 arguments. - */ -#define HTTP_ERRNO_MAP(XX) \ - /* No error */ \ - XX(OK, "success") \ - \ - /* Callback-related errors */ \ - XX(CB_message_begin, "the on_message_begin callback failed") \ - XX(CB_url, "the on_url callback failed") \ - XX(CB_header_field, "the on_header_field callback failed") \ - XX(CB_header_value, "the on_header_value callback failed") \ - XX(CB_headers_complete, "the on_headers_complete callback failed") \ - XX(CB_body, "the on_body callback failed") \ - XX(CB_message_complete, "the on_message_complete callback failed") \ - XX(CB_status, "the on_status callback failed") \ - XX(CB_chunk_header, "the on_chunk_header callback failed") \ - XX(CB_chunk_complete, "the on_chunk_complete callback failed") \ - \ - /* Parsing-related errors */ \ - XX(INVALID_EOF_STATE, "stream ended at an unexpected time") \ - XX(HEADER_OVERFLOW, \ - "too many header bytes seen; overflow detected") \ - XX(CLOSED_CONNECTION, \ - "data received after completed connection: close message") \ - XX(INVALID_VERSION, "invalid HTTP version") \ - XX(INVALID_STATUS, "invalid HTTP status code") \ - XX(INVALID_METHOD, "invalid HTTP method") \ - XX(INVALID_URL, "invalid URL") \ - XX(INVALID_HOST, "invalid host") \ - XX(INVALID_PORT, "invalid port") \ - XX(INVALID_PATH, "invalid path") \ - XX(INVALID_QUERY_STRING, "invalid query string") \ - XX(INVALID_FRAGMENT, "invalid fragment") \ - XX(LF_EXPECTED, "LF character expected") \ - XX(INVALID_HEADER_TOKEN, "invalid character in header") \ - XX(INVALID_CONTENT_LENGTH, \ - "invalid character in content-length header") \ - XX(UNEXPECTED_CONTENT_LENGTH, \ - "unexpected content-length header") \ - XX(INVALID_CHUNK_SIZE, \ - "invalid character in chunk size header") \ - XX(INVALID_CONSTANT, "invalid constant string") \ - XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\ - XX(STRICT, "strict mode assertion failed") \ - XX(PAUSED, "parser is paused") \ - XX(UNKNOWN, "an unknown error occurred") \ - XX(INVALID_TRANSFER_ENCODING, \ - "request has invalid transfer-encoding") \ - - -/* Define HPE_* values for each errno value above */ -#define HTTP_ERRNO_GEN(n, s) HPE_##n, -enum http_errno { - HTTP_ERRNO_MAP(HTTP_ERRNO_GEN) -}; -#undef HTTP_ERRNO_GEN - - -/* Get an http_errno value from an 
http_parser */ -#define HTTP_PARSER_ERRNO(p) ((enum http_errno) (p)->http_errno) - - -struct http_parser { - /** PRIVATE **/ - unsigned int type : 2; /* enum http_parser_type */ - unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */ - unsigned int state : 7; /* enum state from http_parser.c */ - unsigned int header_state : 7; /* enum header_state from http_parser.c */ - unsigned int index : 5; /* index into current matcher */ - unsigned int extra_flags : 2; - unsigned int lenient_http_headers : 1; - - uint32_t nread; /* # bytes read in various scenarios */ - uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */ - - /** READ-ONLY **/ - unsigned short http_major; - unsigned short http_minor; - unsigned int status_code : 16; /* responses only */ - unsigned int method : 8; /* requests only */ - unsigned int http_errno : 7; - - /* 1 = Upgrade header was present and the parser has exited because of that. - * 0 = No upgrade header present. - * Should be checked when http_parser_execute() returns in addition to - * error checking. - */ - unsigned int upgrade : 1; - - /** PUBLIC **/ - void *data; /* A pointer to get hook to the "connection" or "socket" object */ -}; - - -struct http_parser_settings { - http_cb on_message_begin; - http_data_cb on_url; - http_data_cb on_status; - http_data_cb on_header_field; - http_data_cb on_header_value; - http_cb on_headers_complete; - http_data_cb on_body; - http_cb on_message_complete; - /* When on_chunk_header is called, the current chunk length is stored - * in parser->content_length. - */ - http_cb on_chunk_header; - http_cb on_chunk_complete; -}; - - -enum http_parser_url_fields - { UF_SCHEMA = 0 - , UF_HOST = 1 - , UF_PORT = 2 - , UF_PATH = 3 - , UF_QUERY = 4 - , UF_FRAGMENT = 5 - , UF_USERINFO = 6 - , UF_MAX = 7 - }; - - -/* Result structure for http_parser_parse_url(). - * - * Callers should index into field_data[] with UF_* values iff field_set - * has the relevant (1 << UF_*) bit set. As a courtesy to clients (and - * because we probably have padding left over), we convert any port to - * a uint16_t. - */ -struct http_parser_url { - uint16_t field_set; /* Bitmask of (1 << UF_*) values */ - uint16_t port; /* Converted UF_PORT string */ - - struct { - uint16_t off; /* Offset into buffer in which field starts */ - uint16_t len; /* Length of run in buffer */ - } field_data[UF_MAX]; -}; - - -/* Returns the library version. Bits 16-23 contain the major version number, - * bits 8-15 the minor version number and bits 0-7 the patch level. - * Usage example: - * - * unsigned long version = http_parser_version(); - * unsigned major = (version >> 16) & 255; - * unsigned minor = (version >> 8) & 255; - * unsigned patch = version & 255; - * printf("http_parser v%u.%u.%u\n", major, minor, patch); - */ -unsigned long http_parser_version(void); - -void http_parser_init(http_parser *parser, enum http_parser_type type); - - -/* Initialize http_parser_settings members to 0 - */ -void http_parser_settings_init(http_parser_settings *settings); - - -/* Executes the parser. Returns number of parsed bytes. Sets - * `parser->http_errno` on error. */ -size_t http_parser_execute(http_parser *parser, - const http_parser_settings *settings, - const char *data, - size_t len); - - -/* If http_should_keep_alive() in the on_headers_complete or - * on_message_complete callback returns 0, then this should be - * the last message on the connection. - * If you are the server, respond with the "Connection: close" header. 
- * If you are the client, close the connection. - */ -int http_should_keep_alive(const http_parser *parser); - -/* Returns a string version of the HTTP method. */ -const char *http_method_str(enum http_method m); - -/* Returns a string version of the HTTP status code. */ -const char *http_status_str(enum http_status s); - -/* Return a string name of the given error */ -const char *http_errno_name(enum http_errno err); - -/* Return a string description of the given error */ -const char *http_errno_description(enum http_errno err); - -/* Initialize all http_parser_url members to 0 */ -void http_parser_url_init(struct http_parser_url *u); - -/* Parse a URL; return nonzero on failure */ -int http_parser_parse_url(const char *buf, size_t buflen, - int is_connect, - struct http_parser_url *u); - -/* Pause or un-pause the parser; a nonzero value pauses */ -void http_parser_pause(http_parser *parser, int paused); - -/* Checks if this is the final chunk of the body. */ -int http_body_is_final(const http_parser *parser); - -/* Change the maximum header size provided at compile time. */ -void http_parser_set_max_header_size(uint32_t size); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/aiohttp/vendor/http-parser/test.c b/third_party/python/aiohttp/vendor/http-parser/test.c deleted file mode 100644 index 798342451efd..000000000000 --- a/third_party/python/aiohttp/vendor/http-parser/test.c +++ /dev/null @@ -1,4600 +0,0 @@ -/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ -#include "http_parser.h" -#include -#include -#include -#include /* rand */ -#include -#include - -#if defined(__APPLE__) -# undef strlncpy -#endif /* defined(__APPLE__) */ - -#undef TRUE -#define TRUE 1 -#undef FALSE -#define FALSE 0 - -#define MAX_HEADERS 13 -#define MAX_ELEMENT_SIZE 2048 -#define MAX_CHUNKS 16 - -#define MIN(a,b) ((a) < (b) ? 
(a) : (b)) - -#define ARRAY_SIZE(x) (sizeof(x) / sizeof(*x)) - -static http_parser parser; - -struct message { - const char *name; // for debugging purposes - const char *raw; - enum http_parser_type type; - enum http_method method; - int status_code; - char response_status[MAX_ELEMENT_SIZE]; - char request_path[MAX_ELEMENT_SIZE]; - char request_url[MAX_ELEMENT_SIZE]; - char fragment[MAX_ELEMENT_SIZE]; - char query_string[MAX_ELEMENT_SIZE]; - char body[MAX_ELEMENT_SIZE]; - size_t body_size; - const char *host; - const char *userinfo; - uint16_t port; - int num_headers; - enum { NONE=0, FIELD, VALUE } last_header_element; - char headers [MAX_HEADERS][2][MAX_ELEMENT_SIZE]; - int should_keep_alive; - - int num_chunks; - int num_chunks_complete; - int chunk_lengths[MAX_CHUNKS]; - - const char *upgrade; // upgraded body - - unsigned short http_major; - unsigned short http_minor; - - int message_begin_cb_called; - int headers_complete_cb_called; - int message_complete_cb_called; - int status_cb_called; - int message_complete_on_eof; - int body_is_final; -}; - -static int currently_parsing_eof; - -static struct message messages[5]; -static int num_messages; -static http_parser_settings *current_pause_parser; - -/* * R E Q U E S T S * */ -const struct message requests[] = -#define CURL_GET 0 -{ {.name= "curl get" - ,.type= HTTP_REQUEST - ,.raw= "GET /test HTTP/1.1\r\n" - "User-Agent: curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1\r\n" - "Host: 0.0.0.0=5000\r\n" - "Accept: */*\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 3 - ,.headers= - { { "User-Agent", "curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1" } - , { "Host", "0.0.0.0=5000" } - , { "Accept", "*/*" } - } - ,.body= "" - } - -#define FIREFOX_GET 1 -, {.name= "firefox get" - ,.type= HTTP_REQUEST - ,.raw= "GET /favicon.ico HTTP/1.1\r\n" - "Host: 0.0.0.0=5000\r\n" - "User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0\r\n" - "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n" - "Accept-Language: en-us,en;q=0.5\r\n" - "Accept-Encoding: gzip,deflate\r\n" - "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n" - "Keep-Alive: 300\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/favicon.ico" - ,.request_url= "/favicon.ico" - ,.num_headers= 8 - ,.headers= - { { "Host", "0.0.0.0=5000" } - , { "User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0" } - , { "Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" } - , { "Accept-Language", "en-us,en;q=0.5" } - , { "Accept-Encoding", "gzip,deflate" } - , { "Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7" } - , { "Keep-Alive", "300" } - , { "Connection", "keep-alive" } - } - ,.body= "" - } - -#define DUMBLUCK 2 -, {.name= "dumbluck" - ,.type= HTTP_REQUEST - ,.raw= "GET /dumbluck HTTP/1.1\r\n" - "aaaaaaaaaaaaa:++++++++++\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/dumbluck" - ,.request_url= "/dumbluck" - 
,.num_headers= 1 - ,.headers= - { { "aaaaaaaaaaaaa", "++++++++++" } - } - ,.body= "" - } - -#define FRAGMENT_IN_URI 3 -, {.name= "fragment in url" - ,.type= HTTP_REQUEST - ,.raw= "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "page=1" - ,.fragment= "posts-17408" - ,.request_path= "/forums/1/topics/2375" - /* XXX request url does include fragment? */ - ,.request_url= "/forums/1/topics/2375?page=1#posts-17408" - ,.num_headers= 0 - ,.body= "" - } - -#define GET_NO_HEADERS_NO_BODY 4 -, {.name= "get no headers no body" - ,.type= HTTP_REQUEST - ,.raw= "GET /get_no_headers_no_body/world HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE /* would need Connection: close */ - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/get_no_headers_no_body/world" - ,.request_url= "/get_no_headers_no_body/world" - ,.num_headers= 0 - ,.body= "" - } - -#define GET_ONE_HEADER_NO_BODY 5 -, {.name= "get one header no body" - ,.type= HTTP_REQUEST - ,.raw= "GET /get_one_header_no_body HTTP/1.1\r\n" - "Accept: */*\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE /* would need Connection: close */ - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/get_one_header_no_body" - ,.request_url= "/get_one_header_no_body" - ,.num_headers= 1 - ,.headers= - { { "Accept" , "*/*" } - } - ,.body= "" - } - -#define GET_FUNKY_CONTENT_LENGTH 6 -, {.name= "get funky content length body hello" - ,.type= HTTP_REQUEST - ,.raw= "GET /get_funky_content_length_body_hello HTTP/1.0\r\n" - "conTENT-Length: 5\r\n" - "\r\n" - "HELLO" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/get_funky_content_length_body_hello" - ,.request_url= "/get_funky_content_length_body_hello" - ,.num_headers= 1 - ,.headers= - { { "conTENT-Length" , "5" } - } - ,.body= "HELLO" - } - -#define POST_IDENTITY_BODY_WORLD 7 -, {.name= "post identity body world" - ,.type= HTTP_REQUEST - ,.raw= "POST /post_identity_body_world?q=search#hey HTTP/1.1\r\n" - "Accept: */*\r\n" - "Content-Length: 5\r\n" - "\r\n" - "World" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "q=search" - ,.fragment= "hey" - ,.request_path= "/post_identity_body_world" - ,.request_url= "/post_identity_body_world?q=search#hey" - ,.num_headers= 2 - ,.headers= - { { "Accept", "*/*" } - , { "Content-Length", "5" } - } - ,.body= "World" - } - -#define POST_CHUNKED_ALL_YOUR_BASE 8 -, {.name= "post - chunked body: all your base are belong to us" - ,.type= HTTP_REQUEST - ,.raw= "POST /post_chunked_all_your_base HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "1e\r\nall your base are belong to us\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/post_chunked_all_your_base" - ,.request_url= "/post_chunked_all_your_base" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding" , "chunked" } - } - ,.body= "all your base are belong to us" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 0x1e } - 
} - -#define TWO_CHUNKS_MULT_ZERO_END 9 -, {.name= "two chunks ; triple zero ending" - ,.type= HTTP_REQUEST - ,.raw= "POST /two_chunks_mult_zero_end HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "5\r\nhello\r\n" - "6\r\n world\r\n" - "000\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/two_chunks_mult_zero_end" - ,.request_url= "/two_chunks_mult_zero_end" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding", "chunked" } - } - ,.body= "hello world" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 5, 6 } - } - -#define CHUNKED_W_TRAILING_HEADERS 10 -, {.name= "chunked with trailing headers. blech." - ,.type= HTTP_REQUEST - ,.raw= "POST /chunked_w_trailing_headers HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "5\r\nhello\r\n" - "6\r\n world\r\n" - "0\r\n" - "Vary: *\r\n" - "Content-Type: text/plain\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/chunked_w_trailing_headers" - ,.request_url= "/chunked_w_trailing_headers" - ,.num_headers= 3 - ,.headers= - { { "Transfer-Encoding", "chunked" } - , { "Vary", "*" } - , { "Content-Type", "text/plain" } - } - ,.body= "hello world" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 5, 6 } - } - -#define CHUNKED_W_NONSENSE_AFTER_LENGTH 11 -, {.name= "with nonsense after the length" - ,.type= HTTP_REQUEST - ,.raw= "POST /chunked_w_nonsense_after_length HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "5; ilovew3;whattheluck=aretheseparametersfor\r\nhello\r\n" - "6; blahblah; blah\r\n world\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/chunked_w_nonsense_after_length" - ,.request_url= "/chunked_w_nonsense_after_length" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding", "chunked" } - } - ,.body= "hello world" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 5, 6 } - } - -#define WITH_QUOTES 12 -, {.name= "with quotes" - ,.type= HTTP_REQUEST - ,.raw= "GET /with_\"stupid\"_quotes?foo=\"bar\" HTTP/1.1\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "foo=\"bar\"" - ,.fragment= "" - ,.request_path= "/with_\"stupid\"_quotes" - ,.request_url= "/with_\"stupid\"_quotes?foo=\"bar\"" - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define APACHEBENCH_GET 13 -/* The server receiving this request SHOULD NOT wait for EOF - * to know that content-length == 0. - * How to represent this in a unit test? message_complete_on_eof - * Compare with NO_CONTENT_LENGTH_RESPONSE. - */ -, {.name = "apachebench get" - ,.type= HTTP_REQUEST - ,.raw= "GET /test HTTP/1.0\r\n" - "Host: 0.0.0.0:5000\r\n" - "User-Agent: ApacheBench/2.3\r\n" - "Accept: */*\r\n\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 3 - ,.headers= { { "Host", "0.0.0.0:5000" } - , { "User-Agent", "ApacheBench/2.3" } - , { "Accept", "*/*" } - } - ,.body= "" - } - -#define QUERY_URL_WITH_QUESTION_MARK_GET 14 -/* Some clients include '?' 
characters in query strings. - */ -, {.name = "query url with question mark" - ,.type= HTTP_REQUEST - ,.raw= "GET /test.cgi?foo=bar?baz HTTP/1.1\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "foo=bar?baz" - ,.fragment= "" - ,.request_path= "/test.cgi" - ,.request_url= "/test.cgi?foo=bar?baz" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define PREFIX_NEWLINE_GET 15 -/* Some clients, especially after a POST in a keep-alive connection, - * will send an extra CRLF before the next request - */ -, {.name = "newline prefix get" - ,.type= HTTP_REQUEST - ,.raw= "\r\nGET /test HTTP/1.1\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define UPGRADE_REQUEST 16 -, {.name = "upgrade request" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Host: example.com\r\n" - "Connection: Upgrade\r\n" - "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" - "Sec-WebSocket-Protocol: sample\r\n" - "Upgrade: WebSocket\r\n" - "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" - "Origin: http://example.com\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 7 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Host", "example.com" } - , { "Connection", "Upgrade" } - , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } - , { "Sec-WebSocket-Protocol", "sample" } - , { "Upgrade", "WebSocket" } - , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } - , { "Origin", "http://example.com" } - } - ,.body= "" - } - -#define CONNECT_REQUEST 17 -, {.name = "connect request" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT 0-home0.netscape.com:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "\r\n" - "some data\r\n" - "and yet even more data" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "0-home0.netscape.com:443" - ,.num_headers= 2 - ,.upgrade="some data\r\nand yet even more data" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - } - ,.body= "" - } - -#define REPORT_REQ 18 -, {.name= "report request" - ,.type= HTTP_REQUEST - ,.raw= "REPORT /test HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_REPORT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define NO_HTTP_VERSION 19 -, {.name= "request with no http version" - ,.type= HTTP_REQUEST - ,.raw= "GET /\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 0 - ,.http_minor= 9 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define MSEARCH_REQ 20 -, {.name= "m-search request" - ,.type= HTTP_REQUEST - ,.raw= "M-SEARCH * HTTP/1.1\r\n" - "HOST: 239.255.255.250:1900\r\n" - "MAN: 
\"ssdp:discover\"\r\n" - "ST: \"ssdp:all\"\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_MSEARCH - ,.query_string= "" - ,.fragment= "" - ,.request_path= "*" - ,.request_url= "*" - ,.num_headers= 3 - ,.headers= { { "HOST", "239.255.255.250:1900" } - , { "MAN", "\"ssdp:discover\"" } - , { "ST", "\"ssdp:all\"" } - } - ,.body= "" - } - -#define LINE_FOLDING_IN_HEADER 21 -, {.name= "line folding in header value" - ,.type= HTTP_REQUEST - ,.raw= "GET / HTTP/1.1\r\n" - "Line1: abc\r\n" - "\tdef\r\n" - " ghi\r\n" - "\t\tjkl\r\n" - " mno \r\n" - "\t \tqrs\r\n" - "Line2: \t line2\t\r\n" - "Line3:\r\n" - " line3\r\n" - "Line4: \r\n" - " \r\n" - "Connection:\r\n" - " close\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 5 - ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } - , { "Line2", "line2\t" } - , { "Line3", "line3" } - , { "Line4", "" } - , { "Connection", "close" }, - } - ,.body= "" - } - - -#define QUERY_TERMINATED_HOST 22 -, {.name= "host terminated by a query string" - ,.type= HTTP_REQUEST - ,.raw= "GET http://hypnotoad.org?hail=all HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "hail=all" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "http://hypnotoad.org?hail=all" - ,.host= "hypnotoad.org" - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define QUERY_TERMINATED_HOSTPORT 23 -, {.name= "host:port terminated by a query string" - ,.type= HTTP_REQUEST - ,.raw= "GET http://hypnotoad.org:1234?hail=all HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "hail=all" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "http://hypnotoad.org:1234?hail=all" - ,.host= "hypnotoad.org" - ,.port= 1234 - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define SPACE_TERMINATED_HOSTPORT 24 -, {.name= "host:port terminated by a space" - ,.type= HTTP_REQUEST - ,.raw= "GET http://hypnotoad.org:1234 HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "http://hypnotoad.org:1234" - ,.host= "hypnotoad.org" - ,.port= 1234 - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define PATCH_REQ 25 -, {.name = "PATCH request" - ,.type= HTTP_REQUEST - ,.raw= "PATCH /file.txt HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Content-Type: application/example\r\n" - "If-Match: \"e0023aa4e\"\r\n" - "Content-Length: 10\r\n" - "\r\n" - "cccccccccc" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_PATCH - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/file.txt" - ,.request_url= "/file.txt" - ,.num_headers= 4 - ,.headers= { { "Host", "www.example.com" } - , { "Content-Type", "application/example" } - , { "If-Match", "\"e0023aa4e\"" } - , { "Content-Length", "10" } - } - ,.body= "cccccccccc" - } - -#define CONNECT_CAPS_REQUEST 26 -, {.name = "connect caps request" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT HOME0.NETSCAPE.COM:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" 
- "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "HOME0.NETSCAPE.COM:443" - ,.num_headers= 2 - ,.upgrade="" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - } - ,.body= "" - } - -#if !HTTP_PARSER_STRICT -#define UTF8_PATH_REQ 27 -, {.name= "utf-8 path request" - ,.type= HTTP_REQUEST - ,.raw= "GET /δ¶/δt/pope?q=1#narf HTTP/1.1\r\n" - "Host: github.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "q=1" - ,.fragment= "narf" - ,.request_path= "/δ¶/δt/pope" - ,.request_url= "/δ¶/δt/pope?q=1#narf" - ,.num_headers= 1 - ,.headers= { {"Host", "github.com" } - } - ,.body= "" - } - -#define HOSTNAME_UNDERSCORE 28 -, {.name = "hostname underscore" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT home_0.netscape.com:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "home_0.netscape.com:443" - ,.num_headers= 2 - ,.upgrade="" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - } - ,.body= "" - } -#endif /* !HTTP_PARSER_STRICT */ - -/* see https://github.com/ry/http-parser/issues/47 */ -#define EAT_TRAILING_CRLF_NO_CONNECTION_CLOSE 29 -, {.name = "eat CRLF between requests, no \"Connection: close\" header" - ,.raw= "POST / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Content-Type: application/x-www-form-urlencoded\r\n" - "Content-Length: 4\r\n" - "\r\n" - "q=42\r\n" /* note the trailing CRLF */ - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 3 - ,.upgrade= 0 - ,.headers= { { "Host", "www.example.com" } - , { "Content-Type", "application/x-www-form-urlencoded" } - , { "Content-Length", "4" } - } - ,.body= "q=42" - } - -/* see https://github.com/ry/http-parser/issues/47 */ -#define EAT_TRAILING_CRLF_WITH_CONNECTION_CLOSE 30 -, {.name = "eat CRLF between requests even if \"Connection: close\" is set" - ,.raw= "POST / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Content-Type: application/x-www-form-urlencoded\r\n" - "Content-Length: 4\r\n" - "Connection: close\r\n" - "\r\n" - "q=42\r\n" /* note the trailing CRLF */ - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE /* input buffer isn't empty when on_message_complete is called */ - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 4 - ,.upgrade= 0 - ,.headers= { { "Host", "www.example.com" } - , { "Content-Type", "application/x-www-form-urlencoded" } - , { "Content-Length", "4" } - , { "Connection", "close" } - } - ,.body= "q=42" - } - -#define PURGE_REQ 31 -, {.name = "PURGE request" - ,.type= HTTP_REQUEST - ,.raw= "PURGE /file.txt HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= 
HTTP_PURGE - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/file.txt" - ,.request_url= "/file.txt" - ,.num_headers= 1 - ,.headers= { { "Host", "www.example.com" } } - ,.body= "" - } - -#define SEARCH_REQ 32 -, {.name = "SEARCH request" - ,.type= HTTP_REQUEST - ,.raw= "SEARCH / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_SEARCH - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 1 - ,.headers= { { "Host", "www.example.com" } } - ,.body= "" - } - -#define PROXY_WITH_BASIC_AUTH 33 -, {.name= "host:port and basic_auth" - ,.type= HTTP_REQUEST - ,.raw= "GET http://a%12:b!&*$@hypnotoad.org:1234/toto HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.fragment= "" - ,.request_path= "/toto" - ,.request_url= "http://a%12:b!&*$@hypnotoad.org:1234/toto" - ,.host= "hypnotoad.org" - ,.userinfo= "a%12:b!&*$" - ,.port= 1234 - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define LINE_FOLDING_IN_HEADER_WITH_LF 34 -, {.name= "line folding in header value" - ,.type= HTTP_REQUEST - ,.raw= "GET / HTTP/1.1\n" - "Line1: abc\n" - "\tdef\n" - " ghi\n" - "\t\tjkl\n" - " mno \n" - "\t \tqrs\n" - "Line2: \t line2\t\n" - "Line3:\n" - " line3\n" - "Line4: \n" - " \n" - "Connection:\n" - " close\n" - "\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 5 - ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } - , { "Line2", "line2\t" } - , { "Line3", "line3" } - , { "Line4", "" } - , { "Connection", "close" }, - } - ,.body= "" - } - -#define CONNECTION_MULTI 35 -, {.name = "multiple connection header values with folding" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Host: example.com\r\n" - "Connection: Something,\r\n" - " Upgrade, ,Keep-Alive\r\n" - "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" - "Sec-WebSocket-Protocol: sample\r\n" - "Upgrade: WebSocket\r\n" - "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" - "Origin: http://example.com\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 7 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Host", "example.com" } - , { "Connection", "Something, Upgrade, ,Keep-Alive" } - , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } - , { "Sec-WebSocket-Protocol", "sample" } - , { "Upgrade", "WebSocket" } - , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } - , { "Origin", "http://example.com" } - } - ,.body= "" - } - -#define CONNECTION_MULTI_LWS 36 -, {.name = "multiple connection header values with folding and lws" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Connection: keep-alive, upgrade\r\n" - "Upgrade: WebSocket\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 2 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Connection", "keep-alive, upgrade" } - , { "Upgrade", "WebSocket" } - } - ,.body= 
"" - } - -#define CONNECTION_MULTI_LWS_CRLF 37 -, {.name = "multiple connection header values with folding and lws" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Connection: keep-alive, \r\n upgrade\r\n" - "Upgrade: WebSocket\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 2 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Connection", "keep-alive, upgrade" } - , { "Upgrade", "WebSocket" } - } - ,.body= "" - } - -#define UPGRADE_POST_REQUEST 38 -, {.name = "upgrade post request" - ,.type= HTTP_REQUEST - ,.raw= "POST /demo HTTP/1.1\r\n" - "Host: example.com\r\n" - "Connection: Upgrade\r\n" - "Upgrade: HTTP/2.0\r\n" - "Content-Length: 15\r\n" - "\r\n" - "sweet post body" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 4 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Host", "example.com" } - , { "Connection", "Upgrade" } - , { "Upgrade", "HTTP/2.0" } - , { "Content-Length", "15" } - } - ,.body= "sweet post body" - } - -#define CONNECT_WITH_BODY_REQUEST 39 -, {.name = "connect with body request" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT foo.bar.com:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "Content-Length: 10\r\n" - "\r\n" - "blarfcicle" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.request_url= "foo.bar.com:443" - ,.num_headers= 3 - ,.upgrade="blarfcicle" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - , { "Content-Length", "10" } - } - ,.body= "" - } - -/* Examples from the Internet draft for LINK/UNLINK methods: - * https://tools.ietf.org/id/draft-snell-link-method-01.html#rfc.section.5 - */ - -#define LINK_REQUEST 40 -, {.name = "link request" - ,.type= HTTP_REQUEST - ,.raw= "LINK /images/my_dog.jpg HTTP/1.1\r\n" - "Host: example.com\r\n" - "Link: ; rel=\"tag\"\r\n" - "Link: ; rel=\"tag\"\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_LINK - ,.request_path= "/images/my_dog.jpg" - ,.request_url= "/images/my_dog.jpg" - ,.query_string= "" - ,.fragment= "" - ,.num_headers= 3 - ,.headers= { { "Host", "example.com" } - , { "Link", "; rel=\"tag\"" } - , { "Link", "; rel=\"tag\"" } - } - ,.body= "" - } - -#define UNLINK_REQUEST 41 -, {.name = "unlink request" - ,.type= HTTP_REQUEST - ,.raw= "UNLINK /images/my_dog.jpg HTTP/1.1\r\n" - "Host: example.com\r\n" - "Link: ; rel=\"tag\"\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_UNLINK - ,.request_path= "/images/my_dog.jpg" - ,.request_url= "/images/my_dog.jpg" - ,.query_string= "" - ,.fragment= "" - ,.num_headers= 2 - ,.headers= { { "Host", "example.com" } - , { "Link", "; rel=\"tag\"" } - } - ,.body= "" - } - -#define SOURCE_REQUEST 42 -, {.name = "source request" - ,.type= HTTP_REQUEST - ,.raw= "SOURCE /music/sweet/music HTTP/1.1\r\n" - "Host: example.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_SOURCE - 
,.request_path= "/music/sweet/music" - ,.request_url= "/music/sweet/music" - ,.query_string= "" - ,.fragment= "" - ,.num_headers= 1 - ,.headers= { { "Host", "example.com" } } - ,.body= "" - } - -#define SOURCE_ICE_REQUEST 42 -, {.name = "source request" - ,.type= HTTP_REQUEST - ,.raw= "SOURCE /music/sweet/music ICE/1.0\r\n" - "Host: example.com\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_SOURCE - ,.request_path= "/music/sweet/music" - ,.request_url= "/music/sweet/music" - ,.query_string= "" - ,.fragment= "" - ,.num_headers= 1 - ,.headers= { { "Host", "example.com" } } - ,.body= "" - } - -#define POST_MULTI_TE_LAST_CHUNKED 43 -, {.name= "post - multi coding transfer-encoding chunked body" - ,.type= HTTP_REQUEST - ,.raw= "POST / HTTP/1.1\r\n" - "Transfer-Encoding: deflate, chunked\r\n" - "\r\n" - "1e\r\nall your base are belong to us\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding" , "deflate, chunked" } - } - ,.body= "all your base are belong to us" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 0x1e } - } - -#define POST_MULTI_LINE_TE_LAST_CHUNKED 44 -, {.name= "post - multi line coding transfer-encoding chunked body" - ,.type= HTTP_REQUEST - ,.raw= "POST / HTTP/1.1\r\n" - "Transfer-Encoding: deflate,\r\n" - " chunked\r\n" - "\r\n" - "1e\r\nall your base are belong to us\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding" , "deflate, chunked" } - } - ,.body= "all your base are belong to us" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 0x1e } - } -}; - -/* * R E S P O N S E S * */ -const struct message responses[] = -#define GOOGLE_301 0 -{ {.name= "google 301" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 301 Moved Permanently\r\n" - "Location: http://www.google.com/\r\n" - "Content-Type: text/html; charset=UTF-8\r\n" - "Date: Sun, 26 Apr 2009 11:11:49 GMT\r\n" - "Expires: Tue, 26 May 2009 11:11:49 GMT\r\n" - "X-$PrototypeBI-Version: 1.6.0.3\r\n" /* $ char in header field */ - "Cache-Control: public, max-age=2592000\r\n" - "Server: gws\r\n" - "Content-Length: 219 \r\n" - "\r\n" - "\n" - "301 Moved\n" - "
    \n" - "The document has moved\n" - "here.\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 301 - ,.response_status= "Moved Permanently" - ,.num_headers= 8 - ,.headers= - { { "Location", "http://www.google.com/" } - , { "Content-Type", "text/html; charset=UTF-8" } - , { "Date", "Sun, 26 Apr 2009 11:11:49 GMT" } - , { "Expires", "Tue, 26 May 2009 11:11:49 GMT" } - , { "X-$PrototypeBI-Version", "1.6.0.3" } - , { "Cache-Control", "public, max-age=2592000" } - , { "Server", "gws" } - , { "Content-Length", "219 " } - } - ,.body= "\n" - "301 Moved\n" - "
    \n" - "The document has moved\n" - "here.\r\n" - "\r\n" - } - -#define NO_CONTENT_LENGTH_RESPONSE 1 -/* The client should wait for the server's EOF. That is, when content-length - * is not specified, and "Connection: close", the end of body is specified - * by the EOF. - * Compare with APACHEBENCH_GET - */ -, {.name= "no content-length response" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Date: Tue, 04 Aug 2009 07:59:32 GMT\r\n" - "Server: Apache\r\n" - "X-Powered-By: Servlet/2.5 JSP/2.1\r\n" - "Content-Type: text/xml; charset=utf-8\r\n" - "Connection: close\r\n" - "\r\n" - "\n" - "\n" - " \n" - " \n" - " SOAP-ENV:Client\n" - " Client Error\n" - " \n" - " \n" - "" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 5 - ,.headers= - { { "Date", "Tue, 04 Aug 2009 07:59:32 GMT" } - , { "Server", "Apache" } - , { "X-Powered-By", "Servlet/2.5 JSP/2.1" } - , { "Content-Type", "text/xml; charset=utf-8" } - , { "Connection", "close" } - } - ,.body= "\n" - "\n" - " \n" - " \n" - " SOAP-ENV:Client\n" - " Client Error\n" - " \n" - " \n" - "" - } - -#define NO_HEADERS_NO_BODY_404 2 -, {.name= "404 no headers no body" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 404 Not Found\r\n\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 404 - ,.response_status= "Not Found" - ,.num_headers= 0 - ,.headers= {} - ,.body_size= 0 - ,.body= "" - } - -#define NO_REASON_PHRASE 3 -, {.name= "301 no response phrase" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 301\r\n\r\n" - ,.should_keep_alive = FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 301 - ,.response_status= "" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define TRAILING_SPACE_ON_CHUNKED_BODY 4 -, {.name="200 trailing space on chunked body" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Content-Type: text/plain\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "25 \r\n" - "This is the data in the first chunk\r\n" - "\r\n" - "1C\r\n" - "and this is the second one\r\n" - "\r\n" - "0 \r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= - { {"Content-Type", "text/plain" } - , {"Transfer-Encoding", "chunked" } - } - ,.body_size = 37+28 - ,.body = - "This is the data in the first chunk\r\n" - "and this is the second one\r\n" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 0x25, 0x1c } - } - -#define NO_CARRIAGE_RET 5 -, {.name="no carriage ret" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\n" - "Content-Type: text/html; charset=utf-8\n" - "Connection: close\n" - "\n" - "these headers are from http://news.ycombinator.com/" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= - { {"Content-Type", "text/html; charset=utf-8" } - , {"Connection", "close" } - } - ,.body= "these headers are from http://news.ycombinator.com/" - } - -#define PROXY_CONNECTION 6 -, {.name="proxy connection" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Content-Type: text/html; charset=UTF-8\r\n" - "Content-Length: 11\r\n" - "Proxy-Connection: close\r\n" - "Date: Thu, 31 Dec 2009 20:55:48 +0000\r\n" - "\r\n" - "hello world" - ,.should_keep_alive= FALSE - 
,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 4 - ,.headers= - { {"Content-Type", "text/html; charset=UTF-8" } - , {"Content-Length", "11" } - , {"Proxy-Connection", "close" } - , {"Date", "Thu, 31 Dec 2009 20:55:48 +0000"} - } - ,.body= "hello world" - } - -#define UNDERSTORE_HEADER_KEY 7 - // shown by - // curl -o /dev/null -v "http://ad.doubleclick.net/pfadx/DARTSHELLCONFIGXML;dcmt=text/xml;" -, {.name="underscore header key" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Server: DCLK-AdSvr\r\n" - "Content-Type: text/xml\r\n" - "Content-Length: 0\r\n" - "DCLK_imp: v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 4 - ,.headers= - { {"Server", "DCLK-AdSvr" } - , {"Content-Type", "text/xml" } - , {"Content-Length", "0" } - , {"DCLK_imp", "v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o" } - } - ,.body= "" - } - -#define BONJOUR_MADAME_FR 8 -/* The client should not merge two headers fields when the first one doesn't - * have a value. - */ -, {.name= "bonjourmadame.fr" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.0 301 Moved Permanently\r\n" - "Date: Thu, 03 Jun 2010 09:56:32 GMT\r\n" - "Server: Apache/2.2.3 (Red Hat)\r\n" - "Cache-Control: public\r\n" - "Pragma: \r\n" - "Location: http://www.bonjourmadame.fr/\r\n" - "Vary: Accept-Encoding\r\n" - "Content-Length: 0\r\n" - "Content-Type: text/html; charset=UTF-8\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 301 - ,.response_status= "Moved Permanently" - ,.num_headers= 9 - ,.headers= - { { "Date", "Thu, 03 Jun 2010 09:56:32 GMT" } - , { "Server", "Apache/2.2.3 (Red Hat)" } - , { "Cache-Control", "public" } - , { "Pragma", "" } - , { "Location", "http://www.bonjourmadame.fr/" } - , { "Vary", "Accept-Encoding" } - , { "Content-Length", "0" } - , { "Content-Type", "text/html; charset=UTF-8" } - , { "Connection", "keep-alive" } - } - ,.body= "" - } - -#define RES_FIELD_UNDERSCORE 9 -/* Should handle spaces in header fields */ -, {.name= "field underscore" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Date: Tue, 28 Sep 2010 01:14:13 GMT\r\n" - "Server: Apache\r\n" - "Cache-Control: no-cache, must-revalidate\r\n" - "Expires: Mon, 26 Jul 1997 05:00:00 GMT\r\n" - ".et-Cookie: PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com\r\n" - "Vary: Accept-Encoding\r\n" - "_eep-Alive: timeout=45\r\n" /* semantic value ignored */ - "_onnection: Keep-Alive\r\n" /* semantic value ignored */ - "Transfer-Encoding: chunked\r\n" - "Content-Type: text/html\r\n" - "Connection: close\r\n" - "\r\n" - "0\r\n\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 11 - ,.headers= - { { "Date", "Tue, 28 Sep 2010 01:14:13 GMT" } - , { "Server", "Apache" } - , { "Cache-Control", "no-cache, must-revalidate" } - , { "Expires", "Mon, 26 Jul 1997 05:00:00 GMT" } - , { ".et-Cookie", "PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com" } - , { "Vary", "Accept-Encoding" } - , { "_eep-Alive", "timeout=45" } - , { "_onnection", "Keep-Alive" } - , { "Transfer-Encoding", "chunked" } - , { "Content-Type", "text/html" 
} - , { "Connection", "close" } - } - ,.body= "" - ,.num_chunks_complete= 1 - ,.chunk_lengths= {} - } - -#define NON_ASCII_IN_STATUS_LINE 10 -/* Should handle non-ASCII in status line */ -, {.name= "non-ASCII in status line" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 500 Oriëntatieprobleem\r\n" - "Date: Fri, 5 Nov 2010 23:07:12 GMT+2\r\n" - "Content-Length: 0\r\n" - "Connection: close\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 500 - ,.response_status= "Oriëntatieprobleem" - ,.num_headers= 3 - ,.headers= - { { "Date", "Fri, 5 Nov 2010 23:07:12 GMT+2" } - , { "Content-Length", "0" } - , { "Connection", "close" } - } - ,.body= "" - } - -#define HTTP_VERSION_0_9 11 -/* Should handle HTTP/0.9 */ -, {.name= "http version 0.9" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/0.9 200 OK\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 0 - ,.http_minor= 9 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 0 - ,.headers= - {} - ,.body= "" - } - -#define NO_CONTENT_LENGTH_NO_TRANSFER_ENCODING_RESPONSE 12 -/* The client should wait for the server's EOF. That is, when neither - * content-length nor transfer-encoding is specified, the end of body - * is specified by the EOF. - */ -, {.name= "neither content-length nor transfer-encoding response" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Content-Type: text/plain\r\n" - "\r\n" - "hello world" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= - { { "Content-Type", "text/plain" } - } - ,.body= "hello world" - } - -#define NO_BODY_HTTP10_KA_200 13 -, {.name= "HTTP/1.0 with keep-alive and EOF-terminated 200 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.0 200 OK\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= - { { "Connection", "keep-alive" } - } - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP10_KA_204 14 -, {.name= "HTTP/1.0 with keep-alive and a 204 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.0 204 No content\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 204 - ,.response_status= "No content" - ,.num_headers= 1 - ,.headers= - { { "Connection", "keep-alive" } - } - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_KA_200 15 -, {.name= "HTTP/1.1 with an EOF-terminated 200 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 0 - ,.headers={} - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_KA_204 16 -, {.name= "HTTP/1.1 with a 204 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 204 No content\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 204 - ,.response_status= "No content" - ,.num_headers= 0 - ,.headers={} - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_NOKA_204 17 -, {.name= "HTTP/1.1 with a 204 status and keep-alive disabled" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 204 No content\r\n" 
- "Connection: close\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 204 - ,.response_status= "No content" - ,.num_headers= 1 - ,.headers= - { { "Connection", "close" } - } - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_KA_CHUNKED_200 18 -, {.name= "HTTP/1.1 with chunked endocing and a 200 response" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding", "chunked" } - } - ,.body_size= 0 - ,.body= "" - ,.num_chunks_complete= 1 - } - -#if !HTTP_PARSER_STRICT -#define SPACE_IN_FIELD_RES 19 -/* Should handle spaces in header fields */ -, {.name= "field space" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Server: Microsoft-IIS/6.0\r\n" - "X-Powered-By: ASP.NET\r\n" - "en-US Content-Type: text/xml\r\n" /* this is the problem */ - "Content-Type: text/xml\r\n" - "Content-Length: 16\r\n" - "Date: Fri, 23 Jul 2010 18:45:38 GMT\r\n" - "Connection: keep-alive\r\n" - "\r\n" - "hello" /* fake body */ - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 7 - ,.headers= - { { "Server", "Microsoft-IIS/6.0" } - , { "X-Powered-By", "ASP.NET" } - , { "en-US Content-Type", "text/xml" } - , { "Content-Type", "text/xml" } - , { "Content-Length", "16" } - , { "Date", "Fri, 23 Jul 2010 18:45:38 GMT" } - , { "Connection", "keep-alive" } - } - ,.body= "hello" - } -#endif /* !HTTP_PARSER_STRICT */ - -#define AMAZON_COM 20 -, {.name= "amazon.com" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 301 MovedPermanently\r\n" - "Date: Wed, 15 May 2013 17:06:33 GMT\r\n" - "Server: Server\r\n" - "x-amz-id-1: 0GPHKXSJQ826RK7GZEB2\r\n" - "p3p: policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"\r\n" - "x-amz-id-2: STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD\r\n" - "Location: http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846\r\n" - "Vary: Accept-Encoding,User-Agent\r\n" - "Content-Type: text/html; charset=ISO-8859-1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "1\r\n" - "\n\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 301 - ,.response_status= "MovedPermanently" - ,.num_headers= 9 - ,.headers= { { "Date", "Wed, 15 May 2013 17:06:33 GMT" } - , { "Server", "Server" } - , { "x-amz-id-1", "0GPHKXSJQ826RK7GZEB2" } - , { "p3p", "policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"" } - , { "x-amz-id-2", "STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD" } - , { "Location", "http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846" } - , { "Vary", 
"Accept-Encoding,User-Agent" } - , { "Content-Type", "text/html; charset=ISO-8859-1" } - , { "Transfer-Encoding", "chunked" } - } - ,.body= "\n" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 1 } - } - -#define EMPTY_REASON_PHRASE_AFTER_SPACE 20 -, {.name= "empty reason phrase after space" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 \r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define CONTENT_LENGTH_X 21 -, {.name= "Content-Length-X" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Content-Length-X: 0\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "2\r\n" - "OK\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= { { "Content-Length-X", "0" } - , { "Transfer-Encoding", "chunked" } - } - ,.body= "OK" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 2 } - } - -#define HTTP_101_RESPONSE_WITH_UPGRADE_HEADER 22 -, {.name= "HTTP 101 response with Upgrade header" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 101 Switching Protocols\r\n" - "Connection: upgrade\r\n" - "Upgrade: h2c\r\n" - "\r\n" - "proto" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 101 - ,.response_status= "Switching Protocols" - ,.upgrade= "proto" - ,.num_headers= 2 - ,.headers= - { { "Connection", "upgrade" } - , { "Upgrade", "h2c" } - } - } - -#define HTTP_101_RESPONSE_WITH_UPGRADE_HEADER_AND_CONTENT_LENGTH 23 -, {.name= "HTTP 101 response with Upgrade and Content-Length header" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 101 Switching Protocols\r\n" - "Connection: upgrade\r\n" - "Upgrade: h2c\r\n" - "Content-Length: 4\r\n" - "\r\n" - "body" - "proto" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 101 - ,.response_status= "Switching Protocols" - ,.body= "body" - ,.upgrade= "proto" - ,.num_headers= 3 - ,.headers= - { { "Connection", "upgrade" } - , { "Upgrade", "h2c" } - , { "Content-Length", "4" } - } - } - -#define HTTP_101_RESPONSE_WITH_UPGRADE_HEADER_AND_TRANSFER_ENCODING 24 -, {.name= "HTTP 101 response with Upgrade and Transfer-Encoding header" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 101 Switching Protocols\r\n" - "Connection: upgrade\r\n" - "Upgrade: h2c\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "2\r\n" - "bo\r\n" - "2\r\n" - "dy\r\n" - "0\r\n" - "\r\n" - "proto" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 101 - ,.response_status= "Switching Protocols" - ,.body= "body" - ,.upgrade= "proto" - ,.num_headers= 3 - ,.headers= - { { "Connection", "upgrade" } - , { "Upgrade", "h2c" } - , { "Transfer-Encoding", "chunked" } - } - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 2, 2 } - } - -#define HTTP_200_RESPONSE_WITH_UPGRADE_HEADER 25 -, {.name= "HTTP 200 response with Upgrade header" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Connection: upgrade\r\n" - "Upgrade: h2c\r\n" - "\r\n" - "body" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.body= "body" - ,.upgrade= NULL - ,.num_headers= 2 - ,.headers= - { { "Connection", "upgrade" } - , { "Upgrade", "h2c" } - } - } - 
-#define HTTP_200_RESPONSE_WITH_UPGRADE_HEADER_AND_CONTENT_LENGTH 26 -, {.name= "HTTP 200 response with Upgrade and Content-Length header" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Connection: upgrade\r\n" - "Upgrade: h2c\r\n" - "Content-Length: 4\r\n" - "\r\n" - "body" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 3 - ,.body= "body" - ,.upgrade= NULL - ,.headers= - { { "Connection", "upgrade" } - , { "Upgrade", "h2c" } - , { "Content-Length", "4" } - } - } - -#define HTTP_200_RESPONSE_WITH_UPGRADE_HEADER_AND_TRANSFER_ENCODING 27 -, {.name= "HTTP 200 response with Upgrade and Transfer-Encoding header" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Connection: upgrade\r\n" - "Upgrade: h2c\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "2\r\n" - "bo\r\n" - "2\r\n" - "dy\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 3 - ,.body= "body" - ,.upgrade= NULL - ,.headers= - { { "Connection", "upgrade" } - , { "Upgrade", "h2c" } - , { "Transfer-Encoding", "chunked" } - } - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 2, 2 } - } -#define HTTP_200_MULTI_TE_NOT_LAST_CHUNKED 28 -, {.name= "HTTP 200 response with `chunked` being *not last* Transfer-Encoding" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Transfer-Encoding: chunked, identity\r\n" - "\r\n" - "2\r\n" - "OK\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= { { "Transfer-Encoding", "chunked, identity" } - } - ,.body= "2\r\nOK\r\n0\r\n\r\n" - ,.num_chunks_complete= 0 - } -}; - -/* strnlen() is a POSIX.2008 addition. Can't rely on it being available so - * define it ourselves. - */ -size_t -strnlen(const char *s, size_t maxlen) -{ - const char *p; - - p = memchr(s, '\0', maxlen); - if (p == NULL) - return maxlen; - - return p - s; -} - -size_t -strlncat(char *dst, size_t len, const char *src, size_t n) -{ - size_t slen; - size_t dlen; - size_t rlen; - size_t ncpy; - - slen = strnlen(src, n); - dlen = strnlen(dst, len); - - if (dlen < len) { - rlen = len - dlen; - ncpy = slen < rlen ? slen : (rlen - 1); - memcpy(dst + dlen, src, ncpy); - dst[dlen + ncpy] = '\0'; - } - - assert(len > slen + dlen); - return slen + dlen; -} - -size_t -strlncpy(char *dst, size_t len, const char *src, size_t n) -{ - size_t slen; - size_t ncpy; - - slen = strnlen(src, n); - - if (len > 0) { - ncpy = slen < len ? 
slen : (len - 1); - memcpy(dst, src, ncpy); - dst[ncpy] = '\0'; - } - - assert(len > slen); - return slen; -} - -int -request_url_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == &parser); - strlncat(messages[num_messages].request_url, - sizeof(messages[num_messages].request_url), - buf, - len); - return 0; -} - -int -header_field_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == &parser); - struct message *m = &messages[num_messages]; - - if (m->last_header_element != FIELD) - m->num_headers++; - - strlncat(m->headers[m->num_headers-1][0], - sizeof(m->headers[m->num_headers-1][0]), - buf, - len); - - m->last_header_element = FIELD; - - return 0; -} - -int -header_value_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == &parser); - struct message *m = &messages[num_messages]; - - strlncat(m->headers[m->num_headers-1][1], - sizeof(m->headers[m->num_headers-1][1]), - buf, - len); - - m->last_header_element = VALUE; - - return 0; -} - -void -check_body_is_final (const http_parser *p) -{ - if (messages[num_messages].body_is_final) { - fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " - "on last on_body callback call " - "but it doesn't! ***\n\n"); - assert(0); - abort(); - } - messages[num_messages].body_is_final = http_body_is_final(p); -} - -int -body_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == &parser); - strlncat(messages[num_messages].body, - sizeof(messages[num_messages].body), - buf, - len); - messages[num_messages].body_size += len; - check_body_is_final(p); - // printf("body_cb: '%s'\n", requests[num_messages].body); - return 0; -} - -int -count_body_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == &parser); - assert(buf); - messages[num_messages].body_size += len; - check_body_is_final(p); - return 0; -} - -int -message_begin_cb (http_parser *p) -{ - assert(p == &parser); - assert(!messages[num_messages].message_begin_cb_called); - messages[num_messages].message_begin_cb_called = TRUE; - return 0; -} - -int -headers_complete_cb (http_parser *p) -{ - assert(p == &parser); - messages[num_messages].method = parser.method; - messages[num_messages].status_code = parser.status_code; - messages[num_messages].http_major = parser.http_major; - messages[num_messages].http_minor = parser.http_minor; - messages[num_messages].headers_complete_cb_called = TRUE; - messages[num_messages].should_keep_alive = http_should_keep_alive(&parser); - return 0; -} - -int -message_complete_cb (http_parser *p) -{ - assert(p == &parser); - if (messages[num_messages].should_keep_alive != - http_should_keep_alive(&parser)) - { - fprintf(stderr, "\n\n *** Error http_should_keep_alive() should have same " - "value in both on_message_complete and on_headers_complete " - "but it doesn't! ***\n\n"); - assert(0); - abort(); - } - - if (messages[num_messages].body_size && - http_body_is_final(p) && - !messages[num_messages].body_is_final) - { - fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " - "on last on_body callback call " - "but it doesn't! 
***\n\n"); - assert(0); - abort(); - } - - messages[num_messages].message_complete_cb_called = TRUE; - - messages[num_messages].message_complete_on_eof = currently_parsing_eof; - - num_messages++; - return 0; -} - -int -response_status_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == &parser); - - messages[num_messages].status_cb_called = TRUE; - - strlncat(messages[num_messages].response_status, - sizeof(messages[num_messages].response_status), - buf, - len); - return 0; -} - -int -chunk_header_cb (http_parser *p) -{ - assert(p == &parser); - int chunk_idx = messages[num_messages].num_chunks; - messages[num_messages].num_chunks++; - if (chunk_idx < MAX_CHUNKS) { - messages[num_messages].chunk_lengths[chunk_idx] = p->content_length; - } - - return 0; -} - -int -chunk_complete_cb (http_parser *p) -{ - assert(p == &parser); - - /* Here we want to verify that each chunk_header_cb is matched by a - * chunk_complete_cb, so not only should the total number of calls to - * both callbacks be the same, but they also should be interleaved - * properly */ - assert(messages[num_messages].num_chunks == - messages[num_messages].num_chunks_complete + 1); - - messages[num_messages].num_chunks_complete++; - return 0; -} - -/* These dontcall_* callbacks exist so that we can verify that when we're - * paused, no additional callbacks are invoked */ -int -dontcall_message_begin_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_message_begin() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_header_field_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_header_field() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_header_value_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_header_value() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_request_url_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_request_url() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_body_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_body_cb() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_headers_complete_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_headers_complete() called on paused " - "parser ***\n\n"); - abort(); -} - -int -dontcall_message_complete_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_message_complete() called on paused " - "parser ***\n\n"); - abort(); -} - -int -dontcall_response_status_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_status() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_chunk_header_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_chunk_header() called on paused parser ***\n\n"); - exit(1); -} - -int -dontcall_chunk_complete_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_chunk_complete() " - "called on paused parser ***\n\n"); - exit(1); -} - -static http_parser_settings settings_dontcall = - {.on_message_begin = dontcall_message_begin_cb - ,.on_header_field = dontcall_header_field_cb - ,.on_header_value = dontcall_header_value_cb - ,.on_url = dontcall_request_url_cb - ,.on_status = dontcall_response_status_cb - 
,.on_body = dontcall_body_cb - ,.on_headers_complete = dontcall_headers_complete_cb - ,.on_message_complete = dontcall_message_complete_cb - ,.on_chunk_header = dontcall_chunk_header_cb - ,.on_chunk_complete = dontcall_chunk_complete_cb - }; - -/* These pause_* callbacks always pause the parser and just invoke the regular - * callback that tracks content. Before returning, we overwrite the parser - * settings to point to the _dontcall variety so that we can verify that - * the pause actually did, you know, pause. */ -int -pause_message_begin_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return message_begin_cb(p); -} - -int -pause_header_field_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return header_field_cb(p, buf, len); -} - -int -pause_header_value_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return header_value_cb(p, buf, len); -} - -int -pause_request_url_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return request_url_cb(p, buf, len); -} - -int -pause_body_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return body_cb(p, buf, len); -} - -int -pause_headers_complete_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return headers_complete_cb(p); -} - -int -pause_message_complete_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return message_complete_cb(p); -} - -int -pause_response_status_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return response_status_cb(p, buf, len); -} - -int -pause_chunk_header_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return chunk_header_cb(p); -} - -int -pause_chunk_complete_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return chunk_complete_cb(p); -} - -int -connect_headers_complete_cb (http_parser *p) -{ - headers_complete_cb(p); - return 1; -} - -int -connect_message_complete_cb (http_parser *p) -{ - messages[num_messages].should_keep_alive = http_should_keep_alive(&parser); - return message_complete_cb(p); -} - -static http_parser_settings settings_pause = - {.on_message_begin = pause_message_begin_cb - ,.on_header_field = pause_header_field_cb - ,.on_header_value = pause_header_value_cb - ,.on_url = pause_request_url_cb - ,.on_status = pause_response_status_cb - ,.on_body = pause_body_cb - ,.on_headers_complete = pause_headers_complete_cb - ,.on_message_complete = pause_message_complete_cb - ,.on_chunk_header = pause_chunk_header_cb - ,.on_chunk_complete = pause_chunk_complete_cb - }; - -static http_parser_settings settings = - {.on_message_begin = message_begin_cb - ,.on_header_field = header_field_cb - ,.on_header_value = header_value_cb - ,.on_url = request_url_cb - ,.on_status = response_status_cb - ,.on_body = body_cb - ,.on_headers_complete = headers_complete_cb - ,.on_message_complete = message_complete_cb - ,.on_chunk_header = chunk_header_cb - ,.on_chunk_complete = chunk_complete_cb - }; - -static http_parser_settings settings_count_body = - {.on_message_begin = message_begin_cb - ,.on_header_field = 
header_field_cb - ,.on_header_value = header_value_cb - ,.on_url = request_url_cb - ,.on_status = response_status_cb - ,.on_body = count_body_cb - ,.on_headers_complete = headers_complete_cb - ,.on_message_complete = message_complete_cb - ,.on_chunk_header = chunk_header_cb - ,.on_chunk_complete = chunk_complete_cb - }; - -static http_parser_settings settings_connect = - {.on_message_begin = message_begin_cb - ,.on_header_field = header_field_cb - ,.on_header_value = header_value_cb - ,.on_url = request_url_cb - ,.on_status = response_status_cb - ,.on_body = dontcall_body_cb - ,.on_headers_complete = connect_headers_complete_cb - ,.on_message_complete = connect_message_complete_cb - ,.on_chunk_header = chunk_header_cb - ,.on_chunk_complete = chunk_complete_cb - }; - -static http_parser_settings settings_null = - {.on_message_begin = 0 - ,.on_header_field = 0 - ,.on_header_value = 0 - ,.on_url = 0 - ,.on_status = 0 - ,.on_body = 0 - ,.on_headers_complete = 0 - ,.on_message_complete = 0 - ,.on_chunk_header = 0 - ,.on_chunk_complete = 0 - }; - -void -parser_init (enum http_parser_type type) -{ - num_messages = 0; - http_parser_init(&parser, type); - memset(&messages, 0, sizeof messages); -} - -size_t parse (const char *buf, size_t len) -{ - size_t nparsed; - currently_parsing_eof = (len == 0); - nparsed = http_parser_execute(&parser, &settings, buf, len); - return nparsed; -} - -size_t parse_count_body (const char *buf, size_t len) -{ - size_t nparsed; - currently_parsing_eof = (len == 0); - nparsed = http_parser_execute(&parser, &settings_count_body, buf, len); - return nparsed; -} - -size_t parse_pause (const char *buf, size_t len) -{ - size_t nparsed; - http_parser_settings s = settings_pause; - - currently_parsing_eof = (len == 0); - current_pause_parser = &s; - nparsed = http_parser_execute(&parser, current_pause_parser, buf, len); - return nparsed; -} - -size_t parse_connect (const char *buf, size_t len) -{ - size_t nparsed; - currently_parsing_eof = (len == 0); - nparsed = http_parser_execute(&parser, &settings_connect, buf, len); - return nparsed; -} - -static inline int -check_str_eq (const struct message *m, - const char *prop, - const char *expected, - const char *found) { - if ((expected == NULL) != (found == NULL)) { - printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); - printf("expected %s\n", (expected == NULL) ? "NULL" : expected); - printf(" found %s\n", (found == NULL) ? 
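/* Editorial sketch (not part of the vendored file): the parse()/parse_*()
 * wrappers above capture the whole driving loop of the library -- feed bytes
 * to http_parser_execute(), compare the return value with the input length to
 * detect errors, and finish an EOF-terminated message with one more call and
 * a zero-length buffer. A minimal standalone version, with a hypothetical
 * on_complete callback:
 */
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

static int on_complete(http_parser *p)
{
  printf("status=%u keep_alive=%d\n", p->status_code, http_should_keep_alive(p));
  return 0;
}

int main(void)
{
  static const char raw[] =
    "HTTP/1.1 200 OK\r\n"
    "Content-Type: text/plain\r\n"
    "\r\n"
    "hello world";                         /* body is terminated by EOF */

  http_parser parser;
  http_parser_settings s = {0};
  s.on_message_complete = on_complete;

  http_parser_init(&parser, HTTP_RESPONSE);

  size_t nparsed = http_parser_execute(&parser, &s, raw, sizeof(raw) - 1);
  if (nparsed != sizeof(raw) - 1) {
    fprintf(stderr, "error: %s\n", http_errno_name(HTTP_PARSER_ERRNO(&parser)));
    return 1;
  }

  /* No Content-Length and no Transfer-Encoding: the message only completes at
   * EOF, which is signalled by a zero-length execute() call. */
  http_parser_execute(&parser, &s, NULL, 0);
  return 0;
}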
"NULL" : found); - return 0; - } - if (expected != NULL && 0 != strcmp(expected, found)) { - printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); - printf("expected '%s'\n", expected); - printf(" found '%s'\n", found); - return 0; - } - return 1; -} - -static inline int -check_num_eq (const struct message *m, - const char *prop, - int expected, - int found) { - if (expected != found) { - printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); - printf("expected %d\n", expected); - printf(" found %d\n", found); - return 0; - } - return 1; -} - -#define MESSAGE_CHECK_STR_EQ(expected, found, prop) \ - if (!check_str_eq(expected, #prop, expected->prop, found->prop)) return 0 - -#define MESSAGE_CHECK_NUM_EQ(expected, found, prop) \ - if (!check_num_eq(expected, #prop, expected->prop, found->prop)) return 0 - -#define MESSAGE_CHECK_URL_EQ(u, expected, found, prop, fn) \ -do { \ - char ubuf[256]; \ - \ - if ((u)->field_set & (1 << (fn))) { \ - memcpy(ubuf, (found)->request_url + (u)->field_data[(fn)].off, \ - (u)->field_data[(fn)].len); \ - ubuf[(u)->field_data[(fn)].len] = '\0'; \ - } else { \ - ubuf[0] = '\0'; \ - } \ - \ - check_str_eq(expected, #prop, expected->prop, ubuf); \ -} while(0) - -int -message_eq (int index, int connect, const struct message *expected) -{ - int i; - struct message *m = &messages[index]; - - MESSAGE_CHECK_NUM_EQ(expected, m, http_major); - MESSAGE_CHECK_NUM_EQ(expected, m, http_minor); - - if (expected->type == HTTP_REQUEST) { - MESSAGE_CHECK_NUM_EQ(expected, m, method); - } else { - MESSAGE_CHECK_NUM_EQ(expected, m, status_code); - MESSAGE_CHECK_STR_EQ(expected, m, response_status); - assert(m->status_cb_called); - } - - if (!connect) { - MESSAGE_CHECK_NUM_EQ(expected, m, should_keep_alive); - MESSAGE_CHECK_NUM_EQ(expected, m, message_complete_on_eof); - } - - assert(m->message_begin_cb_called); - assert(m->headers_complete_cb_called); - assert(m->message_complete_cb_called); - - - MESSAGE_CHECK_STR_EQ(expected, m, request_url); - - /* Check URL components; we can't do this w/ CONNECT since it doesn't - * send us a well-formed URL. - */ - if (*m->request_url && m->method != HTTP_CONNECT) { - struct http_parser_url u; - - if (http_parser_parse_url(m->request_url, strlen(m->request_url), 0, &u)) { - fprintf(stderr, "\n\n*** failed to parse URL %s ***\n\n", - m->request_url); - abort(); - } - - if (expected->host) { - MESSAGE_CHECK_URL_EQ(&u, expected, m, host, UF_HOST); - } - - if (expected->userinfo) { - MESSAGE_CHECK_URL_EQ(&u, expected, m, userinfo, UF_USERINFO); - } - - m->port = (u.field_set & (1 << UF_PORT)) ? 
- u.port : 0; - - MESSAGE_CHECK_URL_EQ(&u, expected, m, query_string, UF_QUERY); - MESSAGE_CHECK_URL_EQ(&u, expected, m, fragment, UF_FRAGMENT); - MESSAGE_CHECK_URL_EQ(&u, expected, m, request_path, UF_PATH); - MESSAGE_CHECK_NUM_EQ(expected, m, port); - } - - if (connect) { - check_num_eq(m, "body_size", 0, m->body_size); - } else if (expected->body_size) { - MESSAGE_CHECK_NUM_EQ(expected, m, body_size); - } else { - MESSAGE_CHECK_STR_EQ(expected, m, body); - } - - if (connect) { - check_num_eq(m, "num_chunks_complete", 0, m->num_chunks_complete); - } else { - assert(m->num_chunks == m->num_chunks_complete); - MESSAGE_CHECK_NUM_EQ(expected, m, num_chunks_complete); - for (i = 0; i < m->num_chunks && i < MAX_CHUNKS; i++) { - MESSAGE_CHECK_NUM_EQ(expected, m, chunk_lengths[i]); - } - } - - MESSAGE_CHECK_NUM_EQ(expected, m, num_headers); - - int r; - for (i = 0; i < m->num_headers; i++) { - r = check_str_eq(expected, "header field", expected->headers[i][0], m->headers[i][0]); - if (!r) return 0; - r = check_str_eq(expected, "header value", expected->headers[i][1], m->headers[i][1]); - if (!r) return 0; - } - - if (!connect) { - MESSAGE_CHECK_STR_EQ(expected, m, upgrade); - } - - return 1; -} - -/* Given a sequence of varargs messages, return the number of them that the - * parser should successfully parse, taking into account that upgraded - * messages prevent all subsequent messages from being parsed. - */ -size_t -count_parsed_messages(const size_t nmsgs, ...) { - size_t i; - va_list ap; - - va_start(ap, nmsgs); - - for (i = 0; i < nmsgs; i++) { - struct message *m = va_arg(ap, struct message *); - - if (m->upgrade) { - va_end(ap); - return i + 1; - } - } - - va_end(ap); - return nmsgs; -} - -/* Given a sequence of bytes and the number of these that we were able to - * parse, verify that upgrade bodies are correct. - */ -void -upgrade_message_fix(char *body, const size_t nread, const size_t nmsgs, ...) 
{ - va_list ap; - size_t i; - size_t off = 0; - - va_start(ap, nmsgs); - - for (i = 0; i < nmsgs; i++) { - struct message *m = va_arg(ap, struct message *); - - off += strlen(m->raw); - - if (m->upgrade) { - off -= strlen(m->upgrade); - - /* Check the portion of the response after its specified upgrade */ - if (!check_str_eq(m, "upgrade", body + off, body + nread)) { - abort(); - } - - /* Fix up the response so that message_eq() will verify the beginning - * of the upgrade */ - *(body + nread + strlen(m->upgrade)) = '\0'; - messages[num_messages -1 ].upgrade = body + nread; - - va_end(ap); - return; - } - } - - va_end(ap); - printf("\n\n*** Error: expected a message with upgrade ***\n"); - - abort(); -} - -static void -print_error (const char *raw, size_t error_location) -{ - fprintf(stderr, "\n*** %s ***\n\n", - http_errno_description(HTTP_PARSER_ERRNO(&parser))); - - int this_line = 0, char_len = 0; - size_t i, j, len = strlen(raw), error_location_line = 0; - for (i = 0; i < len; i++) { - if (i == error_location) this_line = 1; - switch (raw[i]) { - case '\r': - char_len = 2; - fprintf(stderr, "\\r"); - break; - - case '\n': - fprintf(stderr, "\\n\n"); - - if (this_line) goto print; - - error_location_line = 0; - continue; - - default: - char_len = 1; - fputc(raw[i], stderr); - break; - } - if (!this_line) error_location_line += char_len; - } - - fprintf(stderr, "[eof]\n"); - - print: - for (j = 0; j < error_location_line; j++) { - fputc(' ', stderr); - } - fprintf(stderr, "^\n\nerror location: %u\n", (unsigned int)error_location); -} - -void -test_preserve_data (void) -{ - char my_data[] = "application-specific data"; - http_parser parser; - parser.data = my_data; - http_parser_init(&parser, HTTP_REQUEST); - if (parser.data != my_data) { - printf("\n*** parser.data not preserved accross http_parser_init ***\n\n"); - abort(); - } -} - -struct url_test { - const char *name; - const char *url; - int is_connect; - struct http_parser_url u; - int rv; -}; - -const struct url_test url_tests[] = -{ {.name="proxy request" - ,.url="http://hostname/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) - ,.port=0 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 7, 8 } /* UF_HOST */ - ,{ 0, 0 } /* UF_PORT */ - ,{ 15, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="proxy request with port" - ,.url="http://hostname:444/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) - ,.port=444 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 7, 8 } /* UF_HOST */ - ,{ 16, 3 } /* UF_PORT */ - ,{ 19, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="CONNECT request" - ,.url="hostname:443" - ,.is_connect=1 - ,.u= - {.field_set=(1 << UF_HOST) | (1 << UF_PORT) - ,.port=443 - ,.field_data= - {{ 0, 0 } /* UF_SCHEMA */ - ,{ 0, 8 } /* UF_HOST */ - ,{ 9, 3 } /* UF_PORT */ - ,{ 0, 0 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="CONNECT request but not connect" - ,.url="hostname:443" - ,.is_connect=0 - ,.rv=1 - } - -, {.name="proxy ipv6 request" - ,.url="http://[1:2::3:4]/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) - ,.port=0 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 8, 8 } /* UF_HOST */ - ,{ 0, 0 } /* UF_PORT */ - 
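/* Editorial sketch (not part of the vendored file): the field_data offsets
 * asserted throughout url_tests[] come straight from http_parser_parse_url(),
 * which copies nothing -- it only records (offset, length) pairs into
 * struct http_parser_url. Extracting a component therefore looks like this;
 * the function name print_host_and_port is hypothetical.
 */
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

static void print_host_and_port(const char *url)
{
  struct http_parser_url u;
  http_parser_url_init(&u);

  if (http_parser_parse_url(url, strlen(url), 0 /* not CONNECT */, &u) != 0) {
    fprintf(stderr, "unparsable URL: %s\n", url);
    return;
  }

  if (u.field_set & (1 << UF_HOST)) {
    char host[256] = "";
    size_t len = u.field_data[UF_HOST].len;
    if (len >= sizeof(host)) len = sizeof(host) - 1;
    memcpy(host, url + u.field_data[UF_HOST].off, len);
    host[len] = '\0';
    /* u.port is only meaningful when UF_PORT is set; it stays 0 otherwise. */
    printf("host=%s port=%u\n", host, (unsigned)u.port);
  }
}

/* e.g. print_host_and_port("http://hostname:444/") reports host=hostname and
 * port=444, matching the "proxy request with port" entry above. */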
,{ 17, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="proxy ipv6 request with port" - ,.url="http://[1:2::3:4]:67/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) - ,.port=67 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 8, 8 } /* UF_HOST */ - ,{ 18, 2 } /* UF_PORT */ - ,{ 20, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="CONNECT ipv6 address" - ,.url="[1:2::3:4]:443" - ,.is_connect=1 - ,.u= - {.field_set=(1 << UF_HOST) | (1 << UF_PORT) - ,.port=443 - ,.field_data= - {{ 0, 0 } /* UF_SCHEMA */ - ,{ 1, 8 } /* UF_HOST */ - ,{ 11, 3 } /* UF_PORT */ - ,{ 0, 0 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="ipv4 in ipv6 address" - ,.url="http://[2001:0000:0000:0000:0000:0000:1.9.1.1]/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) - ,.port=0 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 8, 37 } /* UF_HOST */ - ,{ 0, 0 } /* UF_PORT */ - ,{ 46, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="extra ? in query string" - ,.url="http://a.tbcdn.cn/p/fp/2010c/??fp-header-min.css,fp-base-min.css," - "fp-channel-min.css,fp-product-min.css,fp-mall-min.css,fp-category-min.css," - "fp-sub-min.css,fp-gdp4p-min.css,fp-css3-min.css,fp-misc-min.css?t=20101022.css" - ,.is_connect=0 - ,.u= - {.field_set=(1<field_set, u->port); - for (i = 0; i < UF_MAX; i++) { - if ((u->field_set & (1 << i)) == 0) { - printf("\tfield_data[%u]: unset\n", i); - continue; - } - - printf("\tfield_data[%u]: off: %u len: %u part: \"%.*s\n\"", - i, - u->field_data[i].off, - u->field_data[i].len, - u->field_data[i].len, - url + u->field_data[i].off); - } -} - -void -test_parse_url (void) -{ - struct http_parser_url u; - const struct url_test *test; - unsigned int i; - int rv; - - for (i = 0; i < (sizeof(url_tests) / sizeof(url_tests[0])); i++) { - test = &url_tests[i]; - memset(&u, 0, sizeof(u)); - - rv = http_parser_parse_url(test->url, - test->url ? 
strlen(test->url) : 0, - test->is_connect, - &u); - - if (test->rv == 0) { - if (rv != 0) { - printf("\n*** http_parser_parse_url(\"%s\") \"%s\" test failed, " - "unexpected rv %d ***\n\n", test->url, test->name, rv); - abort(); - } - - if (memcmp(&u, &test->u, sizeof(u)) != 0) { - printf("\n*** http_parser_parse_url(\"%s\") \"%s\" failed ***\n", - test->url, test->name); - - printf("target http_parser_url:\n"); - dump_url(test->url, &test->u); - printf("result http_parser_url:\n"); - dump_url(test->url, &u); - - abort(); - } - } else { - /* test->rv != 0 */ - if (rv == 0) { - printf("\n*** http_parser_parse_url(\"%s\") \"%s\" test failed, " - "unexpected rv %d ***\n\n", test->url, test->name, rv); - abort(); - } - } - } -} - -void -test_method_str (void) -{ - assert(0 == strcmp("GET", http_method_str(HTTP_GET))); - assert(0 == strcmp("", http_method_str(1337))); -} - -void -test_status_str (void) -{ - assert(0 == strcmp("OK", http_status_str(HTTP_STATUS_OK))); - assert(0 == strcmp("Not Found", http_status_str(HTTP_STATUS_NOT_FOUND))); - assert(0 == strcmp("", http_status_str(1337))); -} - -void -test_message (const struct message *message) -{ - size_t raw_len = strlen(message->raw); - size_t msg1len; - for (msg1len = 0; msg1len < raw_len; msg1len++) { - parser_init(message->type); - - size_t read; - const char *msg1 = message->raw; - const char *msg2 = msg1 + msg1len; - size_t msg2len = raw_len - msg1len; - - if (msg1len) { - assert(num_messages == 0); - messages[0].headers_complete_cb_called = FALSE; - - read = parse(msg1, msg1len); - - if (!messages[0].headers_complete_cb_called && parser.nread != read) { - assert(parser.nread == read); - print_error(msg1, read); - abort(); - } - - if (message->upgrade && parser.upgrade && num_messages > 0) { - messages[num_messages - 1].upgrade = msg1 + read; - goto test; - } - - if (read != msg1len) { - print_error(msg1, read); - abort(); - } - } - - - read = parse(msg2, msg2len); - - if (message->upgrade && parser.upgrade) { - messages[num_messages - 1].upgrade = msg2 + read; - goto test; - } - - if (read != msg2len) { - print_error(msg2, read); - abort(); - } - - read = parse(NULL, 0); - - if (read != 0) { - print_error(message->raw, read); - abort(); - } - - test: - - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); - abort(); - } - - if(!message_eq(0, 0, message)) abort(); - } -} - -void -test_message_count_body (const struct message *message) -{ - parser_init(message->type); - - size_t read; - size_t l = strlen(message->raw); - size_t i, toread; - size_t chunk = 4024; - - for (i = 0; i < l; i+= chunk) { - toread = MIN(l-i, chunk); - read = parse_count_body(message->raw + i, toread); - if (read != toread) { - print_error(message->raw, read); - abort(); - } - } - - - read = parse_count_body(NULL, 0); - if (read != 0) { - print_error(message->raw, read); - abort(); - } - - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); - abort(); - } - - if(!message_eq(0, 0, message)) abort(); -} - -void -test_simple_type (const char *buf, - enum http_errno err_expected, - enum http_parser_type type) -{ - parser_init(type); - - enum http_errno err; - - parse(buf, strlen(buf)); - err = HTTP_PARSER_ERRNO(&parser); - parse(NULL, 0); - - /* In strict mode, allow us to pass with an unexpected HPE_STRICT as - * long as the caller isn't expecting success. 
- */ -#if HTTP_PARSER_STRICT - if (err_expected != err && err_expected != HPE_OK && err != HPE_STRICT) { -#else - if (err_expected != err) { -#endif - fprintf(stderr, "\n*** test_simple expected %s, but saw %s ***\n\n%s\n", - http_errno_name(err_expected), http_errno_name(err), buf); - abort(); - } -} - -void -test_simple (const char *buf, enum http_errno err_expected) -{ - test_simple_type(buf, err_expected, HTTP_REQUEST); -} - -void -test_invalid_header_content (int req, const char* str) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? - "GET / HTTP/1.1\r\n" : - "HTTP/1.1 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = str; - size_t buflen = strlen(buf); - - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_HEADER_TOKEN); - return; - } - - fprintf(stderr, - "\n*** Error expected but none in invalid header content test ***\n"); - abort(); -} - -void -test_invalid_header_field_content_error (int req) -{ - test_invalid_header_content(req, "Foo: F\01ailure"); - test_invalid_header_content(req, "Foo: B\02ar"); -} - -void -test_invalid_header_field (int req, const char* str) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? - "GET / HTTP/1.1\r\n" : - "HTTP/1.1 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = str; - size_t buflen = strlen(buf); - - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_HEADER_TOKEN); - return; - } - - fprintf(stderr, - "\n*** Error expected but none in invalid header token test ***\n"); - abort(); -} - -void -test_invalid_header_field_token_error (int req) -{ - test_invalid_header_field(req, "Fo@: Failure"); - test_invalid_header_field(req, "Foo\01\test: Bar"); -} - -void -test_double_content_length_error (int req) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? - "GET / HTTP/1.1\r\n" : - "HTTP/1.1 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = "Content-Length: 0\r\nContent-Length: 1\r\n\r\n"; - size_t buflen = strlen(buf); - - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - assert(HTTP_PARSER_ERRNO(&parser) == HPE_UNEXPECTED_CONTENT_LENGTH); - return; - } - - fprintf(stderr, - "\n*** Error expected but none in double content-length test ***\n"); - abort(); -} - -void -test_chunked_content_length_error (int req) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? 
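/* Editorial sketch (not part of the vendored file): the surrounding tests
 * (test_double_content_length_error and test_chunked_content_length_error)
 * exercise http_parser's rejection of conflicting body-length information --
 * a repeated Content-Length, or Content-Length combined with
 * Transfer-Encoding, makes the parser stop and report
 * HPE_UNEXPECTED_CONTENT_LENGTH. A caller only needs HTTP_PARSER_ERRNO() to
 * observe this; classify_request below is a hypothetical helper.
 */
#include <string.h>
#include "http_parser.h"

static const char *classify_request(const char *raw)
{
  http_parser parser;
  http_parser_settings s = {0};          /* callbacks are irrelevant here */

  http_parser_init(&parser, HTTP_REQUEST);
  http_parser_execute(&parser, &s, raw, strlen(raw));

  /* Returns "HPE_OK" when nothing is wrong, otherwise the error name,
   * e.g. "HPE_UNEXPECTED_CONTENT_LENGTH". */
  return http_errno_name(HTTP_PARSER_ERRNO(&parser));
}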
- "GET / HTTP/1.1\r\n" : - "HTTP/1.1 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = "Transfer-Encoding: anything\r\nContent-Length: 1\r\n\r\n"; - size_t buflen = strlen(buf); - - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - assert(HTTP_PARSER_ERRNO(&parser) == HPE_UNEXPECTED_CONTENT_LENGTH); - return; - } - - fprintf(stderr, - "\n*** Error expected but none in chunked content-length test ***\n"); - abort(); -} - -void -test_header_cr_no_lf_error (int req) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? - "GET / HTTP/1.1\r\n" : - "HTTP/1.1 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = "Foo: 1\rBar: 1\r\n\r\n"; - size_t buflen = strlen(buf); - - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - assert(HTTP_PARSER_ERRNO(&parser) == HPE_LF_EXPECTED); - return; - } - - fprintf(stderr, - "\n*** Error expected but none in header whitespace test ***\n"); - abort(); -} - -void -test_no_overflow_parse_url (void) -{ - int rv; - struct http_parser_url u; - - http_parser_url_init(&u); - rv = http_parser_parse_url("http://example.com:8001", 22, 0, &u); - - if (rv != 0) { - fprintf(stderr, - "\n*** test_no_overflow_parse_url invalid return value=%d\n", - rv); - abort(); - } - - if (u.port != 800) { - fprintf(stderr, - "\n*** test_no_overflow_parse_url invalid port number=%d\n", - u.port); - abort(); - } -} - -void -test_header_overflow_error (int req) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? 
"GET / HTTP/1.1\r\n" : "HTTP/1.0 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = "header-key: header-value\r\n"; - size_t buflen = strlen(buf); - - int i; - for (i = 0; i < 10000; i++) { - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - //fprintf(stderr, "error found on iter %d\n", i); - assert(HTTP_PARSER_ERRNO(&parser) == HPE_HEADER_OVERFLOW); - return; - } - } - - fprintf(stderr, "\n*** Error expected but none in header overflow test ***\n"); - abort(); -} - - -void -test_header_nread_value () -{ - http_parser parser; - http_parser_init(&parser, HTTP_REQUEST); - size_t parsed; - const char *buf; - buf = "GET / HTTP/1.1\r\nheader: value\nhdr: value\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - assert(parser.nread == strlen(buf)); -} - - -static void -test_content_length_overflow (const char *buf, size_t buflen, int expect_ok) -{ - http_parser parser; - http_parser_init(&parser, HTTP_RESPONSE); - http_parser_execute(&parser, &settings_null, buf, buflen); - - if (expect_ok) - assert(HTTP_PARSER_ERRNO(&parser) == HPE_OK); - else - assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_CONTENT_LENGTH); -} - -void -test_header_content_length_overflow_error (void) -{ -#define X(size) \ - "HTTP/1.1 200 OK\r\n" \ - "Content-Length: " #size "\r\n" \ - "\r\n" - const char a[] = X(1844674407370955160); /* 2^64 / 10 - 1 */ - const char b[] = X(18446744073709551615); /* 2^64-1 */ - const char c[] = X(18446744073709551616); /* 2^64 */ -#undef X - test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ - test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ - test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ -} - -void -test_chunk_content_length_overflow_error (void) -{ -#define X(size) \ - "HTTP/1.1 200 OK\r\n" \ - "Transfer-Encoding: chunked\r\n" \ - "\r\n" \ - #size "\r\n" \ - "..." - const char a[] = X(FFFFFFFFFFFFFFE); /* 2^64 / 16 - 1 */ - const char b[] = X(FFFFFFFFFFFFFFFF); /* 2^64-1 */ - const char c[] = X(10000000000000000); /* 2^64 */ -#undef X - test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ - test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ - test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ -} - -void -test_no_overflow_long_body (int req, size_t length) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - size_t i; - char buf1[3000]; - size_t buf1len = sprintf(buf1, "%s\r\nConnection: Keep-Alive\r\nContent-Length: %lu\r\n\r\n", - req ? "POST / HTTP/1.0" : "HTTP/1.0 200 OK", (unsigned long)length); - parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); - if (parsed != buf1len) - goto err; - - for (i = 0; i < length; i++) { - char foo = 'a'; - parsed = http_parser_execute(&parser, &settings_null, &foo, 1); - if (parsed != 1) - goto err; - } - - parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); - if (parsed != buf1len) goto err; - return; - - err: - fprintf(stderr, - "\n*** error in test_no_overflow_long_body %s of length %lu ***\n", - req ? 
"REQUEST" : "RESPONSE", - (unsigned long)length); - abort(); -} - -void -test_multiple3 (const struct message *r1, const struct message *r2, const struct message *r3) -{ - int message_count = count_parsed_messages(3, r1, r2, r3); - - char total[ strlen(r1->raw) - + strlen(r2->raw) - + strlen(r3->raw) - + 1 - ]; - total[0] = '\0'; - - strcat(total, r1->raw); - strcat(total, r2->raw); - strcat(total, r3->raw); - - parser_init(r1->type); - - size_t read; - - read = parse(total, strlen(total)); - - if (parser.upgrade) { - upgrade_message_fix(total, read, 3, r1, r2, r3); - goto test; - } - - if (read != strlen(total)) { - print_error(total, read); - abort(); - } - - read = parse(NULL, 0); - - if (read != 0) { - print_error(total, read); - abort(); - } - -test: - - if (message_count != num_messages) { - fprintf(stderr, "\n\n*** Parser didn't see 3 messages only %d *** \n", num_messages); - abort(); - } - - if (!message_eq(0, 0, r1)) abort(); - if (message_count > 1 && !message_eq(1, 0, r2)) abort(); - if (message_count > 2 && !message_eq(2, 0, r3)) abort(); -} - -/* SCAN through every possible breaking to make sure the - * parser can handle getting the content in any chunks that - * might come from the socket - */ -void -test_scan (const struct message *r1, const struct message *r2, const struct message *r3) -{ - char total[80*1024] = "\0"; - char buf1[80*1024] = "\0"; - char buf2[80*1024] = "\0"; - char buf3[80*1024] = "\0"; - - strcat(total, r1->raw); - strcat(total, r2->raw); - strcat(total, r3->raw); - - size_t read; - - int total_len = strlen(total); - - int total_ops = 2 * (total_len - 1) * (total_len - 2) / 2; - int ops = 0 ; - - size_t buf1_len, buf2_len, buf3_len; - int message_count = count_parsed_messages(3, r1, r2, r3); - - int i,j,type_both; - for (type_both = 0; type_both < 2; type_both ++ ) { - for (j = 2; j < total_len; j ++ ) { - for (i = 1; i < j; i ++ ) { - - if (ops % 1000 == 0) { - printf("\b\b\b\b%3.0f%%", 100 * (float)ops /(float)total_ops); - fflush(stdout); - } - ops += 1; - - parser_init(type_both ? 
HTTP_BOTH : r1->type); - - buf1_len = i; - strlncpy(buf1, sizeof(buf1), total, buf1_len); - buf1[buf1_len] = 0; - - buf2_len = j - i; - strlncpy(buf2, sizeof(buf1), total+i, buf2_len); - buf2[buf2_len] = 0; - - buf3_len = total_len - j; - strlncpy(buf3, sizeof(buf1), total+j, buf3_len); - buf3[buf3_len] = 0; - - assert(num_messages == 0); - messages[0].headers_complete_cb_called = FALSE; - - read = parse(buf1, buf1_len); - - if (!messages[0].headers_complete_cb_called && parser.nread != read) { - print_error(buf1, read); - goto error; - } - - if (parser.upgrade) goto test; - - if (read != buf1_len) { - print_error(buf1, read); - goto error; - } - - read += parse(buf2, buf2_len); - - if (parser.upgrade) goto test; - - if (read != buf1_len + buf2_len) { - print_error(buf2, read); - goto error; - } - - read += parse(buf3, buf3_len); - - if (parser.upgrade) goto test; - - if (read != buf1_len + buf2_len + buf3_len) { - print_error(buf3, read); - goto error; - } - - parse(NULL, 0); - -test: - if (parser.upgrade) { - upgrade_message_fix(total, read, 3, r1, r2, r3); - } - - if (message_count != num_messages) { - fprintf(stderr, "\n\nParser didn't see %d messages only %d\n", - message_count, num_messages); - goto error; - } - - if (!message_eq(0, 0, r1)) { - fprintf(stderr, "\n\nError matching messages[0] in test_scan.\n"); - goto error; - } - - if (message_count > 1 && !message_eq(1, 0, r2)) { - fprintf(stderr, "\n\nError matching messages[1] in test_scan.\n"); - goto error; - } - - if (message_count > 2 && !message_eq(2, 0, r3)) { - fprintf(stderr, "\n\nError matching messages[2] in test_scan.\n"); - goto error; - } - } - } - } - puts("\b\b\b\b100%"); - return; - - error: - fprintf(stderr, "i=%d j=%d\n", i, j); - fprintf(stderr, "buf1 (%u) %s\n\n", (unsigned int)buf1_len, buf1); - fprintf(stderr, "buf2 (%u) %s\n\n", (unsigned int)buf2_len , buf2); - fprintf(stderr, "buf3 (%u) %s\n", (unsigned int)buf3_len, buf3); - abort(); -} - -// user required to free the result -// string terminated by \0 -char * -create_large_chunked_message (int body_size_in_kb, const char* headers) -{ - int i; - size_t wrote = 0; - size_t headers_len = strlen(headers); - size_t bufsize = headers_len + (5+1024+2)*body_size_in_kb + 6; - char * buf = malloc(bufsize); - - memcpy(buf, headers, headers_len); - wrote += headers_len; - - for (i = 0; i < body_size_in_kb; i++) { - // write 1kb chunk into the body. - memcpy(buf + wrote, "400\r\n", 5); - wrote += 5; - memset(buf + wrote, 'C', 1024); - wrote += 1024; - strcpy(buf + wrote, "\r\n"); - wrote += 2; - } - - memcpy(buf + wrote, "0\r\n\r\n", 6); - wrote += 6; - assert(wrote == bufsize); - - return buf; -} - -/* Verify that we can pause parsing at any of the bytes in the - * message and still get the result that we're expecting. */ -void -test_message_pause (const struct message *msg) -{ - char *buf = (char*) msg->raw; - size_t buflen = strlen(msg->raw); - size_t nread; - - parser_init(msg->type); - - do { - nread = parse_pause(buf, buflen); - - // We can only set the upgrade buffer once we've gotten our message - // completion callback. 
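/* Editorial sketch (not part of the vendored file): the surrounding
 * test_message_pause() drives the same pause/resume contract that callers use
 * in practice -- http_parser_pause(p, 1) inside a callback makes
 * http_parser_execute() return early with HPE_PAUSED, and parsing continues
 * from the unread bytes after http_parser_pause(p, 0). Condensed, with a
 * hypothetical pause_after_headers callback:
 */
#include <assert.h>
#include <string.h>
#include "http_parser.h"

static int pause_after_headers(http_parser *p)
{
  http_parser_pause(p, 1);           /* stop before the body is delivered */
  return 0;
}

static void parse_with_pause(const char *buf, size_t len)
{
  http_parser parser;
  http_parser_settings s = {0};
  s.on_headers_complete = pause_after_headers;

  http_parser_init(&parser, HTTP_REQUEST);

  while (len > 0) {
    size_t nread = http_parser_execute(&parser, &s, buf, len);
    if (nread < len)
      assert(HTTP_PARSER_ERRNO(&parser) == HPE_PAUSED);
    buf += nread;
    len -= nread;
    http_parser_pause(&parser, 0);   /* un-pause and keep going */
  }
}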
- if (messages[0].message_complete_cb_called && - msg->upgrade && - parser.upgrade) { - messages[0].upgrade = buf + nread; - goto test; - } - - if (nread < buflen) { - - // Not much do to if we failed a strict-mode check - if (HTTP_PARSER_ERRNO(&parser) == HPE_STRICT) { - return; - } - - assert (HTTP_PARSER_ERRNO(&parser) == HPE_PAUSED); - } - - buf += nread; - buflen -= nread; - http_parser_pause(&parser, 0); - } while (buflen > 0); - - nread = parse_pause(NULL, 0); - assert (nread == 0); - -test: - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name); - abort(); - } - - if(!message_eq(0, 0, msg)) abort(); -} - -/* Verify that body and next message won't be parsed in responses to CONNECT */ -void -test_message_connect (const struct message *msg) -{ - char *buf = (char*) msg->raw; - size_t buflen = strlen(msg->raw); - - parser_init(msg->type); - - parse_connect(buf, buflen); - - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name); - abort(); - } - - if(!message_eq(0, 1, msg)) abort(); -} - -int -main (void) -{ - unsigned i, j, k; - unsigned long version; - unsigned major; - unsigned minor; - unsigned patch; - - version = http_parser_version(); - major = (version >> 16) & 255; - minor = (version >> 8) & 255; - patch = version & 255; - printf("http_parser v%u.%u.%u (0x%06lx)\n", major, minor, patch, version); - - printf("sizeof(http_parser) = %u\n", (unsigned int)sizeof(http_parser)); - assert(sizeof(http_parser) == 4 + 4 + 8 + 2 + 2 + 4 + sizeof(void *)); - - //// API - test_preserve_data(); - test_parse_url(); - test_method_str(); - test_status_str(); - - //// NREAD - test_header_nread_value(); - - //// OVERFLOW CONDITIONS - test_no_overflow_parse_url(); - - test_header_overflow_error(HTTP_REQUEST); - test_no_overflow_long_body(HTTP_REQUEST, 1000); - test_no_overflow_long_body(HTTP_REQUEST, 100000); - - test_header_overflow_error(HTTP_RESPONSE); - test_no_overflow_long_body(HTTP_RESPONSE, 1000); - test_no_overflow_long_body(HTTP_RESPONSE, 100000); - - test_header_content_length_overflow_error(); - test_chunk_content_length_overflow_error(); - - //// HEADER FIELD CONDITIONS - test_double_content_length_error(HTTP_REQUEST); - test_chunked_content_length_error(HTTP_REQUEST); - test_header_cr_no_lf_error(HTTP_REQUEST); - test_invalid_header_field_token_error(HTTP_REQUEST); - test_invalid_header_field_content_error(HTTP_REQUEST); - test_double_content_length_error(HTTP_RESPONSE); - test_chunked_content_length_error(HTTP_RESPONSE); - test_header_cr_no_lf_error(HTTP_RESPONSE); - test_invalid_header_field_token_error(HTTP_RESPONSE); - test_invalid_header_field_content_error(HTTP_RESPONSE); - - test_simple_type( - "POST / HTTP/1.1\r\n" - "Content-Length:\r\n" // empty - "\r\n", - HPE_INVALID_CONTENT_LENGTH, - HTTP_REQUEST); - - test_simple_type( - "POST / HTTP/1.1\r\n" - "Content-Length: 42 \r\n" // Note the surrounding whitespace. 
- "\r\n", - HPE_OK, - HTTP_REQUEST); - - test_simple_type( - "POST / HTTP/1.1\r\n" - "Content-Length: 4 2\r\n" - "\r\n", - HPE_INVALID_CONTENT_LENGTH, - HTTP_REQUEST); - - test_simple_type( - "POST / HTTP/1.1\r\n" - "Content-Length: 13 37\r\n" - "\r\n", - HPE_INVALID_CONTENT_LENGTH, - HTTP_REQUEST); - - test_simple_type( - "POST / HTTP/1.1\r\n" - "Content-Length: 42\r\n" - " Hello world!\r\n", - HPE_INVALID_CONTENT_LENGTH, - HTTP_REQUEST); - - test_simple_type( - "POST / HTTP/1.1\r\n" - "Content-Length: 42\r\n" - " \r\n", - HPE_OK, - HTTP_REQUEST); - - //// RESPONSES - - test_simple_type("HTP/1.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE); - test_simple_type("HTTP/01.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE); - test_simple_type("HTTP/11.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE); - test_simple_type("HTTP/1.01 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE); - test_simple_type("HTTP/1.1\t200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE); - test_simple_type("\rHTTP/1.1\t200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE); - - for (i = 0; i < ARRAY_SIZE(responses); i++) { - test_message(&responses[i]); - } - - for (i = 0; i < ARRAY_SIZE(responses); i++) { - test_message_pause(&responses[i]); - } - - for (i = 0; i < ARRAY_SIZE(responses); i++) { - test_message_connect(&responses[i]); - } - - for (i = 0; i < ARRAY_SIZE(responses); i++) { - if (!responses[i].should_keep_alive) continue; - for (j = 0; j < ARRAY_SIZE(responses); j++) { - if (!responses[j].should_keep_alive) continue; - for (k = 0; k < ARRAY_SIZE(responses); k++) { - test_multiple3(&responses[i], &responses[j], &responses[k]); - } - } - } - - test_message_count_body(&responses[NO_HEADERS_NO_BODY_404]); - test_message_count_body(&responses[TRAILING_SPACE_ON_CHUNKED_BODY]); - - // test very large chunked response - { - char * msg = create_large_chunked_message(31337, - "HTTP/1.0 200 OK\r\n" - "Transfer-Encoding: chunked\r\n" - "Content-Type: text/plain\r\n" - "\r\n"); - struct message large_chunked = - {.name= "large chunked" - ,.type= HTTP_RESPONSE - ,.raw= msg - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= - { { "Transfer-Encoding", "chunked" } - , { "Content-Type", "text/plain" } - } - ,.body_size= 31337*1024 - ,.num_chunks_complete= 31338 - }; - for (i = 0; i < MAX_CHUNKS; i++) { - large_chunked.chunk_lengths[i] = 1024; - } - test_message_count_body(&large_chunked); - free(msg); - } - - - - printf("response scan 1/2 "); - test_scan( &responses[TRAILING_SPACE_ON_CHUNKED_BODY] - , &responses[NO_BODY_HTTP10_KA_204] - , &responses[NO_REASON_PHRASE] - ); - - printf("response scan 2/2 "); - test_scan( &responses[BONJOUR_MADAME_FR] - , &responses[UNDERSTORE_HEADER_KEY] - , &responses[NO_CARRIAGE_RET] - ); - - puts("responses okay"); - - - /// REQUESTS - - test_simple("GET / IHTTP/1.0\r\n\r\n", HPE_INVALID_CONSTANT); - test_simple("GET / ICE/1.0\r\n\r\n", HPE_INVALID_CONSTANT); - test_simple("GET / HTP/1.1\r\n\r\n", HPE_INVALID_VERSION); - test_simple("GET / HTTP/01.1\r\n\r\n", HPE_INVALID_VERSION); - test_simple("GET / HTTP/11.1\r\n\r\n", HPE_INVALID_VERSION); - test_simple("GET / HTTP/1.01\r\n\r\n", HPE_INVALID_VERSION); - - test_simple("GET / HTTP/1.0\r\nHello: w\1rld\r\n\r\n", HPE_INVALID_HEADER_TOKEN); - test_simple("GET / HTTP/1.0\r\nHello: woooo\2rld\r\n\r\n", HPE_INVALID_HEADER_TOKEN); - - // Extended characters - see 
nodejs/test/parallel/test-http-headers-obstext.js - test_simple("GET / HTTP/1.1\r\n" - "Test: Düsseldorf\r\n", - HPE_OK); - - // Well-formed but incomplete - test_simple("GET / HTTP/1.1\r\n" - "Content-Type: text/plain\r\n" - "Content-Length: 6\r\n" - "\r\n" - "fooba", - HPE_OK); - - // Unknown Transfer-Encoding in request - test_simple("GET / HTTP/1.1\r\n" - "Transfer-Encoding: unknown\r\n" - "\r\n", - HPE_INVALID_TRANSFER_ENCODING); - - static const char *all_methods[] = { - "DELETE", - "GET", - "HEAD", - "POST", - "PUT", - //"CONNECT", //CONNECT can't be tested like other methods, it's a tunnel - "OPTIONS", - "TRACE", - "COPY", - "LOCK", - "MKCOL", - "MOVE", - "PROPFIND", - "PROPPATCH", - "SEARCH", - "UNLOCK", - "BIND", - "REBIND", - "UNBIND", - "ACL", - "REPORT", - "MKACTIVITY", - "CHECKOUT", - "MERGE", - "M-SEARCH", - "NOTIFY", - "SUBSCRIBE", - "UNSUBSCRIBE", - "PATCH", - "PURGE", - "MKCALENDAR", - "LINK", - "UNLINK", - 0 }; - const char **this_method; - for (this_method = all_methods; *this_method; this_method++) { - char buf[200]; - sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method); - test_simple(buf, HPE_OK); - } - - static const char *bad_methods[] = { - "ASDF", - "C******", - "COLA", - "GEM", - "GETA", - "M****", - "MKCOLA", - "PROPPATCHA", - "PUN", - "PX", - "SA", - "hello world", - 0 }; - for (this_method = bad_methods; *this_method; this_method++) { - char buf[200]; - sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method); - test_simple(buf, HPE_INVALID_METHOD); - } - - // illegal header field name line folding - test_simple("GET / HTTP/1.1\r\n" - "name\r\n" - " : value\r\n" - "\r\n", - HPE_INVALID_HEADER_TOKEN); - - const char *dumbluck2 = - "GET / HTTP/1.1\r\n" - "X-SSL-Nonsense: -----BEGIN CERTIFICATE-----\r\n" - "\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n" - "\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n" - "\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n" - "\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n" - "\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n" - "\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n" - "\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n" - "\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n" - "\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n" - "\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n" - "\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n" - "\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n" - "\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n" - "\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n" - "\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n" - "\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n" - "\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n" - "\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n" - "\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n" - "\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n" - "\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n" - "\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n" - "\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n" - 
"\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n" - "\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n" - "\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n" - "\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n" - "\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n" - "\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n" - "\tRA==\r\n" - "\t-----END CERTIFICATE-----\r\n" - "\r\n"; - test_simple(dumbluck2, HPE_OK); - - const char *corrupted_connection = - "GET / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Connection\r\033\065\325eep-Alive\r\n" - "Accept-Encoding: gzip\r\n" - "\r\n"; - test_simple(corrupted_connection, HPE_INVALID_HEADER_TOKEN); - - const char *corrupted_header_name = - "GET / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "X-Some-Header\r\033\065\325eep-Alive\r\n" - "Accept-Encoding: gzip\r\n" - "\r\n"; - test_simple(corrupted_header_name, HPE_INVALID_HEADER_TOKEN); - -#if 0 - // NOTE(Wed Nov 18 11:57:27 CET 2009) this seems okay. we just read body - // until EOF. - // - // no content-length - // error if there is a body without content length - const char *bad_get_no_headers_no_body = "GET /bad_get_no_headers_no_body/world HTTP/1.1\r\n" - "Accept: */*\r\n" - "\r\n" - "HELLO"; - test_simple(bad_get_no_headers_no_body, 0); -#endif - /* TODO sending junk and large headers gets rejected */ - - - /* check to make sure our predefined requests are okay */ - for (i = 0; i < ARRAY_SIZE(requests); i++) { - test_message(&requests[i]); - } - - for (i = 0; i < ARRAY_SIZE(requests); i++) { - test_message_pause(&requests[i]); - } - - for (i = 0; i < ARRAY_SIZE(requests); i++) { - if (!requests[i].should_keep_alive) continue; - for (j = 0; j < ARRAY_SIZE(requests); j++) { - if (!requests[j].should_keep_alive) continue; - for (k = 0; k < ARRAY_SIZE(requests); k++) { - test_multiple3(&requests[i], &requests[j], &requests[k]); - } - } - } - - printf("request scan 1/4 "); - test_scan( &requests[GET_NO_HEADERS_NO_BODY] - , &requests[GET_ONE_HEADER_NO_BODY] - , &requests[GET_NO_HEADERS_NO_BODY] - ); - - printf("request scan 2/4 "); - test_scan( &requests[POST_CHUNKED_ALL_YOUR_BASE] - , &requests[POST_IDENTITY_BODY_WORLD] - , &requests[GET_FUNKY_CONTENT_LENGTH] - ); - - printf("request scan 3/4 "); - test_scan( &requests[TWO_CHUNKS_MULT_ZERO_END] - , &requests[CHUNKED_W_TRAILING_HEADERS] - , &requests[CHUNKED_W_NONSENSE_AFTER_LENGTH] - ); - - printf("request scan 4/4 "); - test_scan( &requests[QUERY_URL_WITH_QUESTION_MARK_GET] - , &requests[PREFIX_NEWLINE_GET ] - , &requests[CONNECT_REQUEST] - ); - - puts("requests okay"); - - return 0; -} diff --git a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/LICENSE b/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/LICENSE deleted file mode 100644 index 8dada3edaf50..000000000000 --- a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/METADATA b/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/METADATA deleted file mode 100644 index 5ec05a278598..000000000000 --- a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/METADATA +++ /dev/null @@ -1,165 +0,0 @@ -Metadata-Version: 2.1 -Name: async-timeout -Version: 3.0.1 -Summary: Timeout context manager for asyncio programs -Home-page: https://github.com/aio-libs/async_timeout/ -Author: Andrew Svetlov -Author-email: andrew.svetlov@gmail.com -License: Apache 2 -Platform: UNKNOWN -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Framework :: AsyncIO -Requires-Python: >=3.5.3 - -async-timeout -============= -.. image:: https://travis-ci.org/aio-libs/async-timeout.svg?branch=master - :target: https://travis-ci.org/aio-libs/async-timeout -.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/async-timeout -.. image:: https://img.shields.io/pypi/v/async-timeout.svg - :target: https://pypi.python.org/pypi/async-timeout -.. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - -asyncio-compatible timeout context manager. - - -Usage example -------------- - - -The context manager is useful in cases when you want to apply timeout -logic around block of code or in cases when ``asyncio.wait_for()`` is -not suitable. Also it's much faster than ``asyncio.wait_for()`` -because ``timeout`` doesn't create a new task. - -The ``timeout(timeout, *, loop=None)`` call returns a context manager -that cancels a block on *timeout* expiring:: - - async with timeout(1.5): - await inner() - -1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing - happens. -2. Otherwise ``inner()`` is cancelled internally by sending - ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is - raised outside of context manager scope. - -*timeout* parameter could be ``None`` for skipping timeout functionality. - - -Context manager has ``.expired`` property for check if timeout happens -exactly in context manager:: - - async with timeout(1.5) as cm: - await inner() - print(cm.expired) - -The property is ``True`` if ``inner()`` execution is cancelled by -timeout context manager. - -If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired`` -is ``False``. - -Installation ------------- - -:: - - $ pip install async-timeout - -The library is Python 3 only! - - - -Authors and License -------------------- - -The module is written by Andrew Svetlov. - -It's *Apache 2* licensed and freely available. 
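A minimal sketch of the behaviour described in the README above — the guarded block being cancelled and ``asyncio.TimeoutError`` surfacing outside the ``async with`` scope — using only the public ``timeout`` API shown there; the ``long_running()`` and ``main()`` coroutines are illustrative names, not part of the library::

    import asyncio
    from async_timeout import timeout

    async def long_running():
        await asyncio.sleep(10)

    async def main():
        try:
            async with timeout(0.1):
                await long_running()
        except asyncio.TimeoutError:
            # the code under timeout() was cancelled internally; the
            # TimeoutError is raised here, outside the context manager scope
            print('timed out')

    asyncio.get_event_loop().run_until_complete(main())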
- - -CHANGES -======= - -3.0.1 (2018-10-09) ------------------- - -- More aggressive typing (#48) - -3.0.0 (2018-05-05) ------------------- - -- Drop Python 3.4, the minimal supported version is Python 3.5.3 - -- Provide type annotations - -2.0.1 (2018-03-13) ------------------- - -* Fix ``PendingDeprecationWarning`` on Python 3.7 (#33) - - -2.0.0 (2017-10-09) ------------------- - -* Changed `timeout <= 0` behaviour - - * Backward incompatibility change, prior this version `0` was - shortcut for `None` - * when timeout <= 0 `TimeoutError` raised faster - -1.4.0 (2017-09-09) ------------------- - -* Implement `remaining` property (#20) - - * If timeout is not started yet or started unconstrained: - `remaining` is `None` - * If timeout is expired: `remaining` is `0.0` - * All others: roughly amount of time before `TimeoutError` is triggered - -1.3.0 (2017-08-23) ------------------- - -* Don't suppress nested exception on timeout. Exception context points - on cancelled line with suspended `await` (#13) - -* Introduce `.timeout` property (#16) - -* Add methods for using as async context manager (#9) - -1.2.1 (2017-05-02) ------------------- - -* Support unpublished event loop's "current_task" api. - - -1.2.0 (2017-03-11) ------------------- - -* Extra check on context manager exit - -* 0 is no-op timeout - - -1.1.0 (2016-10-20) ------------------- - -* Rename to `async-timeout` - -1.0.0 (2016-09-09) ------------------- - -* The first release. - - diff --git a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/RECORD b/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/RECORD deleted file mode 100644 index 8979fc35aca7..000000000000 --- a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/RECORD +++ /dev/null @@ -1,7 +0,0 @@ -async_timeout/__init__.py,sha256=mGvWOoRqLtScEU3kmzqtTSH7EQsHvu8zhgHxOTXCn7c,3654 -async_timeout/py.typed,sha256=9LJP7QJ0oxYYrBtmXuFirzMbS3D9_3Tz-d3tyUtNp0U,11 -async_timeout-3.0.1.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357 -async_timeout-3.0.1.dist-info/METADATA,sha256=_3ByJ8L0-cU5wWu75_Rl8n0ZkbSCgW15fMAu_DzwTm0,4013 -async_timeout-3.0.1.dist-info/WHEEL,sha256=-ZFxwj8mZJPIVcZGLrsQ8UGRcxVAOExzPLVBGR7u7bE,92 -async_timeout-3.0.1.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14 -async_timeout-3.0.1.dist-info/RECORD,, diff --git a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/WHEEL b/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/WHEEL deleted file mode 100644 index f87af075c08e..000000000000 --- a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.32.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/top_level.txt b/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/top_level.txt deleted file mode 100644 index ad29955ef909..000000000000 --- a/third_party/python/async_timeout/async_timeout-3.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -async_timeout diff --git a/third_party/python/async_timeout/async_timeout/__init__.py b/third_party/python/async_timeout/async_timeout/__init__.py deleted file mode 100644 index dcc55f0ceaec..000000000000 --- a/third_party/python/async_timeout/async_timeout/__init__.py +++ /dev/null @@ -1,115 +0,0 @@ -import asyncio -import sys - -from types import TracebackType -from typing import Optional, Type, Any # noqa - - -__version__ = 
'3.0.1' - -PY_37 = sys.version_info >= (3, 7) - - -class timeout: - """timeout context manager. - - Useful in cases when you want to apply timeout logic around block - of code or in cases when asyncio.wait_for is not suitable. For example: - - >>> with timeout(0.001): - ... async with aiohttp.get('https://github.com') as r: - ... await r.text() - - - timeout - value in seconds or None to disable timeout logic - loop - asyncio compatible event loop - """ - def __init__(self, timeout: Optional[float], - *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._timeout = timeout - if loop is None: - loop = asyncio.get_event_loop() - self._loop = loop - self._task = None # type: Optional[asyncio.Task[Any]] - self._cancelled = False - self._cancel_handler = None # type: Optional[asyncio.Handle] - self._cancel_at = None # type: Optional[float] - - def __enter__(self) -> 'timeout': - return self._do_enter() - - def __exit__(self, - exc_type: Type[BaseException], - exc_val: BaseException, - exc_tb: TracebackType) -> Optional[bool]: - self._do_exit(exc_type) - return None - - async def __aenter__(self) -> 'timeout': - return self._do_enter() - - async def __aexit__(self, - exc_type: Type[BaseException], - exc_val: BaseException, - exc_tb: TracebackType) -> None: - self._do_exit(exc_type) - - @property - def expired(self) -> bool: - return self._cancelled - - @property - def remaining(self) -> Optional[float]: - if self._cancel_at is not None: - return max(self._cancel_at - self._loop.time(), 0.0) - else: - return None - - def _do_enter(self) -> 'timeout': - # Support Tornado 5- without timeout - # Details: https://github.com/python/asyncio/issues/392 - if self._timeout is None: - return self - - self._task = current_task(self._loop) - if self._task is None: - raise RuntimeError('Timeout context manager should be used ' - 'inside a task') - - if self._timeout <= 0: - self._loop.call_soon(self._cancel_task) - return self - - self._cancel_at = self._loop.time() + self._timeout - self._cancel_handler = self._loop.call_at( - self._cancel_at, self._cancel_task) - return self - - def _do_exit(self, exc_type: Type[BaseException]) -> None: - if exc_type is asyncio.CancelledError and self._cancelled: - self._cancel_handler = None - self._task = None - raise asyncio.TimeoutError - if self._timeout is not None and self._cancel_handler is not None: - self._cancel_handler.cancel() - self._cancel_handler = None - self._task = None - return None - - def _cancel_task(self) -> None: - if self._task is not None: - self._task.cancel() - self._cancelled = True - - -def current_task(loop: asyncio.AbstractEventLoop) -> 'asyncio.Task[Any]': - if PY_37: - task = asyncio.current_task(loop=loop) # type: ignore - else: - task = asyncio.Task.current_task(loop=loop) - if task is None: - # this should be removed, tokio must use register_task and family API - if hasattr(loop, 'current_task'): - task = loop.current_task() # type: ignore - - return task diff --git a/third_party/python/async_timeout/async_timeout/py.typed b/third_party/python/async_timeout/async_timeout/py.typed deleted file mode 100644 index f6e0339af6f3..000000000000 --- a/third_party/python/async_timeout/async_timeout/py.typed +++ /dev/null @@ -1 +0,0 @@ -Placeholder \ No newline at end of file diff --git a/third_party/python/cbor2/.gitignore b/third_party/python/cbor2/.gitignore new file mode 100644 index 000000000000..067698fa627e --- /dev/null +++ b/third_party/python/cbor2/.gitignore @@ -0,0 +1,13 @@ +.project +.pydevproject +.idea/ +.coverage 
+.cache/ +.tox/ +.eggs/ +*.egg-info/ +*.pyc +dist/ +docs/_build/ +build/ +virtualenv/ diff --git a/third_party/python/cbor2/.travis.yml b/third_party/python/cbor2/.travis.yml new file mode 100644 index 000000000000..82cce5a9f05b --- /dev/null +++ b/third_party/python/cbor2/.travis.yml @@ -0,0 +1,28 @@ +sudo: false + +language: python + +python: + - "2.7" + - "3.3" + - "3.4" + - "3.5" + - "3.6" + - "pypy" + +install: pip install tox-travis coveralls + +script: tox + +after_success: coveralls + +deploy: + provider: pypi + user: agronholm + password: + secure: QZ5qoxsrzns/b27adWNzh/OAJp86yRuxTyAFhvas/pbkiALdlT/+PGyhJBnpe+7WBTWnkIXl+YU//voJ0btf6DJcWwgRavMsy22LJJPkvvK+2DHiZ//DbpLbqKWc74y4moce29BCajFTm9JkVwcL2dgN9WuZt+Tay0efcP4sESLxo5lIGdlaQbu+9zVs61Z4Ov+yyEMO/j3LeKshNmUq+84CveQWMiXndXBfJX5TWwjahmUNDp5fMctJxr4fqgL4HCTVQhU79dPc00yDEGS45QkpP8JDrF1DQvU5Ht4COz/Lvzt11pwsAvws2ddclqBUCQsGaWvEWH5rxZTYx/MaMVdTctaUVNoT0wnFUsXXZkomQV0x8vb5RtRLDrKwXosXlSEqnRyiKhdgHGoswHvB7XF5BtQ5RmydRX77pwEGmFd3lqRif2bos0MEeOJA8Xds0TGOKO4PyokBnj/a0tjT2LEVxObmTT6grz5QPXi386AWgxbNl0Lp7cnkSpCqC1hEHVqrDlbtu7uvfGwwe/sYlEcQ07PNCvFoR2GXJawbeHmJRfz+KXjffrt2yCzc671FL1goUysHKdBCppvUInI8FCMQpVWEh5MmQJKB4IpDrhqfo0VS+NNZgZ8lFStq27Pmwqf1HUTGlaDi9VQ0Vo7tW5j4JbD/JvOQSb3j9DjUFps= + distributions: sdist bdist_wheel + on: + tags: true + python: "3.5" + repo: agronholm/cbor2 diff --git a/third_party/python/cbor2/LICENSE.txt b/third_party/python/cbor2/LICENSE.txt new file mode 100644 index 000000000000..07806f8af9dd --- /dev/null +++ b/third_party/python/cbor2/LICENSE.txt @@ -0,0 +1,19 @@ +This is the MIT license: http://www.opensource.org/licenses/mit-license.php + +Copyright (c) Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of this +software and associated documentation files (the "Software"), to deal in the Software +without restriction, including without limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/third_party/python/cbor2/PKG-INFO b/third_party/python/cbor2/PKG-INFO new file mode 100644 index 000000000000..cc419299d0b5 --- /dev/null +++ b/third_party/python/cbor2/PKG-INFO @@ -0,0 +1,45 @@ +Metadata-Version: 1.1 +Name: cbor2 +Version: 4.0.1 +Summary: Pure Python CBOR (de)serializer with extensive tag support +Home-page: https://github.com/agronholm/cbor2 +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi +License: MIT +Description: .. image:: https://travis-ci.org/agronholm/cbor2.svg?branch=master + :target: https://travis-ci.org/agronholm/cbor2 + :alt: Build Status + .. 
image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/cbor2?branch=master + :alt: Code Coverage + + This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) + (`RFC 7049`_) serialization format. + + There exists another Python CBOR implementation (cbor) which is faster on CPython due to its C + extensions. On PyPy, cbor2 and cbor are almost identical in performance. The other implementation + also lacks documentation and a comprehensive test suite, does not support most standard extension + tags and is known to crash (segfault) when passed a cyclic structure (say, a list containing + itself). + + .. _RFC 7049: https://tools.ietf.org/html/rfc7049 + + Project links + ------------- + + * `Documentation `_ + * `Source code `_ + * `Issue tracker `_ + +Keywords: serialization cbor +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 diff --git a/third_party/python/cbor2/cbor2-4.0.1.dist-info/DESCRIPTION.rst b/third_party/python/cbor2/README.rst similarity index 99% rename from third_party/python/cbor2/cbor2-4.0.1.dist-info/DESCRIPTION.rst rename to third_party/python/cbor2/README.rst index 734481b638a5..c1131fd63cf2 100644 --- a/third_party/python/cbor2/cbor2-4.0.1.dist-info/DESCRIPTION.rst +++ b/third_party/python/cbor2/README.rst @@ -22,5 +22,3 @@ Project links * `Documentation `_ * `Source code `_ * `Issue tracker `_ - - diff --git a/third_party/python/cbor2/cbor2-4.0.1.dist-info/METADATA b/third_party/python/cbor2/cbor2-4.0.1.dist-info/METADATA deleted file mode 100644 index c7f42ac60f27..000000000000 --- a/third_party/python/cbor2/cbor2-4.0.1.dist-info/METADATA +++ /dev/null @@ -1,50 +0,0 @@ -Metadata-Version: 2.0 -Name: cbor2 -Version: 4.0.1 -Summary: Pure Python CBOR (de)serializer with extensive tag support -Home-page: https://github.com/agronholm/cbor2 -Author: Alex Grönholm -Author-email: alex.gronholm@nextday.fi -License: MIT -Keywords: serialization cbor -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Provides-Extra: testing -Requires-Dist: pytest; extra == 'testing' -Requires-Dist: pytest-cov; extra == 'testing' - -.. image:: https://travis-ci.org/agronholm/cbor2.svg?branch=master - :target: https://travis-ci.org/agronholm/cbor2 - :alt: Build Status -.. 
image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master - :target: https://coveralls.io/github/agronholm/cbor2?branch=master - :alt: Code Coverage - -This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) -(`RFC 7049`_) serialization format. - -There exists another Python CBOR implementation (cbor) which is faster on CPython due to its C -extensions. On PyPy, cbor2 and cbor are almost identical in performance. The other implementation -also lacks documentation and a comprehensive test suite, does not support most standard extension -tags and is known to crash (segfault) when passed a cyclic structure (say, a list containing -itself). - -.. _RFC 7049: https://tools.ietf.org/html/rfc7049 - -Project links -------------- - -* `Documentation `_ -* `Source code `_ -* `Issue tracker `_ - - diff --git a/third_party/python/cbor2/cbor2-4.0.1.dist-info/RECORD b/third_party/python/cbor2/cbor2-4.0.1.dist-info/RECORD deleted file mode 100644 index e29279b8df8f..000000000000 --- a/third_party/python/cbor2/cbor2-4.0.1.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -cbor2/__init__.py,sha256=Si4l50bD5McrzpgQ6bEmhla2w2U910scs0lCqHzwxOo,239 -cbor2/compat.py,sha256=aBzyMrGwl061zdmlFPQrk4U1rqZQcVNl5ojRsQdG5d0,1033 -cbor2/decoder.py,sha256=6bJMq6fC8RRe5uJFrvKy9T-J3VLYKIkSF9UUmmlYj2A,11936 -cbor2/encoder.py,sha256=OimwLht642jK61Vl2X5FeIv3rHL0hd5yjQ7ajoO2hko,11496 -cbor2/types.py,sha256=I2lpvqktj8Nm8MJtUwdhOYXAUJw-UctYTQlKg0qZ9pc,1302 -cbor2-4.0.1.dist-info/DESCRIPTION.rst,sha256=1Lg57ktrF2XHHyDuGfWtKY5VZd4ydp3-7Ptr27cbWrE,1091 -cbor2-4.0.1.dist-info/METADATA,sha256=h1mC4t8mFZcyJc3cHWJFUf5wUWYVPAqh4Q4DRe0ajQg,1981 -cbor2-4.0.1.dist-info/RECORD,, -cbor2-4.0.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -cbor2-4.0.1.dist-info/metadata.json,sha256=lHkH6x7w_MNrQqe5ZNu9kihQi3Gg-XOQpYTTElRtKe8,1006 -cbor2-4.0.1.dist-info/top_level.txt,sha256=4Z7JYs5_QM6eqOa2Ew1n_2-uKm2SYl76j2NWTtfCChs,6 diff --git a/third_party/python/cbor2/cbor2-4.0.1.dist-info/metadata.json b/third_party/python/cbor2/cbor2-4.0.1.dist-info/metadata.json deleted file mode 100644 index 85d36a4496cd..000000000000 --- a/third_party/python/cbor2/cbor2-4.0.1.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "extensions": {"python.details": {"contacts": [{"email": "alex.gronholm@nextday.fi", "name": "Alex Gr\u00f6nholm", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/agronholm/cbor2"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.29.0)", "keywords": ["serialization", "cbor"], "license": "MIT", "metadata_version": "2.0", "name": "cbor2", "run_requires": [{"extra": "testing", "requires": ["pytest-cov", "pytest"]}], "summary": "Pure Python CBOR (de)serializer with extensive tag support", "version": "4.0.1"} \ No newline at end of file diff --git a/third_party/python/cbor2/cbor2-4.0.1.dist-info/top_level.txt b/third_party/python/cbor2/cbor2-4.0.1.dist-info/top_level.txt deleted file mode 100644 index 615ca8aebae0..000000000000 --- 
a/third_party/python/cbor2/cbor2-4.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -cbor2 diff --git a/third_party/python/cbor2/docs/conf.py b/third_party/python/cbor2/docs/conf.py new file mode 100644 index 000000000000..0ddeb0719e4c --- /dev/null +++ b/third_party/python/cbor2/docs/conf.py @@ -0,0 +1,33 @@ +# coding: utf-8 +#!/usr/bin/env python +import pkg_resources + + +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx' +] + +templates_path = ['_templates'] +source_suffix = '.rst' +master_doc = 'index' +project = 'cbor2' +author = u'Alex Grönholm' +copyright = u'2016, ' + author + +v = pkg_resources.get_distribution(project).parsed_version +version = v.base_version +release = v.public + +language = None + +exclude_patterns = ['_build'] +pygments_style = 'sphinx' +highlight_language = 'python' +todo_include_todos = False + +html_theme = 'sphinx_rtd_theme' +html_static_path = ['_static'] +htmlhelp_basename = project.replace('-', '') + 'doc' + +intersphinx_mapping = {'python': ('http://docs.python.org/', None)} diff --git a/third_party/python/cbor2/docs/customizing.rst b/third_party/python/cbor2/docs/customizing.rst new file mode 100644 index 000000000000..bf9b1b454093 --- /dev/null +++ b/third_party/python/cbor2/docs/customizing.rst @@ -0,0 +1,132 @@ +Customizing encoding and decoding +================================= + +Both the encoder and decoder can be customized to support a wider range of types. + +On the encoder side, this is accomplished by passing a callback as the ``default`` constructor +argument. This callback will receive an object that the encoder could not serialize on its own. +The callback should then return a value that the encoder can serialize on its own, although the +return value is allowed to contain objects that also require the encoder to use the callback, as +long as it won't result in an infinite loop. + +On the decoder side, you have two options: ``tag_hook`` and ``object_hook``. The former is called +by the decoder to process any semantic tags that have no predefined decoders. The latter is called +for any newly decoded ``dict`` objects, and is mostly useful for implementing a JSON compatible +custom type serialization scheme. Unless your requirements restrict you to JSON compatible types +only, it is recommended to use ``tag_hook`` for this purpose. + +JSON compatibility +------------------ + +In certain applications, it may be desirable to limit the supported types to the same ones +serializable as JSON: (unicode) string, integer, float, boolean, null, array and object (dict). +This can be done by passing the ``json_compatible`` option to the encoder. When incompatible types +are encountered, a :class:`~cbor2.encoder.CBOREncodeError` is then raised. + +For the decoder, there is no support for detecting incoming incompatible types yet. 
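A minimal sketch of the JSON-compatibility check described above. The ``json_compatible`` option name is taken from this section; that ``dumps()`` forwards it to the encoder, as it does for the other encoder options shown in these docs, is an assumption of the sketch::

    from cbor2 import dumps
    from cbor2.encoder import CBOREncodeError

    # JSON-serializable types (str, int, float, bool, None, list, dict) pass through
    dumps({'a': [1, 2.5, None, True]}, json_compatible=True)

    try:
        # bytes are outside the JSON-compatible set listed above, so the
        # encoder is expected to reject them with CBOREncodeError
        dumps({'blob': b'\x00\x01'}, json_compatible=True)
    except CBOREncodeError as exc:
        print('rejected:', exc)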
+ +Using the CBOR tags for custom types +------------------------------------ + +The most common way to use ``default`` is to call :meth:`~cbor2.encoder.CBOREncoder.encode` +to add a custom tag in the data stream, with the payload as the value:: + + class Point(object): + def __init__(self, x, y): + self.x = x + self.y = y + + def default_encoder(encoder, value): + # Tag number 4000 was chosen arbitrarily + encoder.encode(CBORTag(4000, [value.x, value.y])) + +The corresponding ``tag_hook`` would be:: + + def tag_hook(decoder, tag, shareable_index=None): + if tag.tag != 4000: + return tag + + # tag.value is now the [x, y] list we serialized before + return Point(*tag.value) + +Using dicts to carry custom types +--------------------------------- + +The same could be done with ``object_hook``, except less efficiently:: + + def default_encoder(encoder, value): + encoder.encode(dict(typename='Point', x=value.x, y=value.y)) + + def object_hook(decoder, value): + if value.get('typename') != 'Point': + return value + + return Point(value['x'], value['y']) + +You should make sure that whatever way you decide to use for telling apart your "specially marked" +dicts from arbitrary data dicts won't mistake on for the other. + +Value sharing with custom types +------------------------------- + +In order to properly encode and decode cyclic references with custom types, some special care has +to be taken. Suppose you have a custom type as below, where every child object contains a reference +to its parent and the parent contains a list of children:: + + from cbor2 import dumps, loads, shareable_encoder, CBORTag + + + class MyType(object): + def __init__(self, parent=None): + self.parent = parent + self.children = [] + if parent: + self.parent.children.append(self) + +This would not normally be serializable, as it would lead to an endless loop (in the worst case) +and raise some exception (in the best case). Now, enter CBOR's extension tags 28 and 29. These tags +make it possible to add special markers into the data stream which can be later referenced and +substituted with the object marked earlier. + +To do this, in ``default`` hooks used with the encoder you will need to use the +:meth:`~cbor2.encoder.shareable_encoder` decorator on your ``default`` hook function. It will +automatically automatically add the object to the shared values registry on the encoder and prevent +it from being serialized twice (instead writing a reference to the data stream):: + + @shareable_encoder + def default_encoder(encoder, value): + # The state has to be serialized separately so that the decoder would have a chance to + # create an empty instance before the shared value references are decoded + serialized_state = encoder.encode_to_bytes(value.__dict__) + encoder.encode(CBORTag(3000, serialized_state)) + +On the decoder side, you will need to initialize an empty instance for shared value lookup before +the object's state (which may contain references to it) is decoded. 
+This is done with the :meth:`~cbor2.encoder.CBORDecoder.set_shareable` method:: + + def tag_hook(decoder, tag, shareable_index=None): + # Return all other tags as-is + if tag.tag != 3000: + return tag + + # Create a raw instance before initializing its state to make it possible for cyclic + # references to work + instance = MyType.__new__(MyType) + decoder.set_shareable(shareable_index, instance) + + # Separately decode the state of the new object and then apply it + state = decoder.decode_from_bytes(tag.value) + instance.__dict__.update(state) + return instance + +You could then verify that the cyclic references have been restored after deserialization:: + + parent = MyType() + child1 = MyType(parent) + child2 = MyType(parent) + serialized = dumps(parent, default=default_encoder, value_sharing=True) + + new_parent = loads(serialized, tag_hook=tag_hook) + assert new_parent.children[0].parent is new_parent + assert new_parent.children[1].parent is new_parent + diff --git a/third_party/python/cbor2/docs/index.rst b/third_party/python/cbor2/docs/index.rst new file mode 100644 index 000000000000..443c245d8619 --- /dev/null +++ b/third_party/python/cbor2/docs/index.rst @@ -0,0 +1,15 @@ +.. include:: ../README.rst + :start-line: 7 + :end-before: Project links + +Table of contents +----------------- + +.. toctree:: + :maxdepth: 2 + + usage + customizing + versionhistory + +* :ref:`API reference ` diff --git a/third_party/python/cbor2/docs/modules/decoder.rst b/third_party/python/cbor2/docs/modules/decoder.rst new file mode 100644 index 000000000000..c2c58fe9db40 --- /dev/null +++ b/third_party/python/cbor2/docs/modules/decoder.rst @@ -0,0 +1,5 @@ +:mod:`cbor2.decoder` +==================== + +.. automodule:: cbor2.decoder + :members: diff --git a/third_party/python/cbor2/docs/modules/encoder.rst b/third_party/python/cbor2/docs/modules/encoder.rst new file mode 100644 index 000000000000..c4240eeaad61 --- /dev/null +++ b/third_party/python/cbor2/docs/modules/encoder.rst @@ -0,0 +1,5 @@ +:mod:`cbor2.encoder` +==================== + +.. automodule:: cbor2.encoder + :members: diff --git a/third_party/python/cbor2/docs/modules/types.rst b/third_party/python/cbor2/docs/modules/types.rst new file mode 100644 index 000000000000..a6dedaa3e49f --- /dev/null +++ b/third_party/python/cbor2/docs/modules/types.rst @@ -0,0 +1,5 @@ +:mod:`cbor2.types` +================== + +.. automodule:: cbor2.types + :members: diff --git a/third_party/python/cbor2/docs/usage.rst b/third_party/python/cbor2/docs/usage.rst new file mode 100644 index 000000000000..54b028ee53c3 --- /dev/null +++ b/third_party/python/cbor2/docs/usage.rst @@ -0,0 +1,80 @@ +Basic usage +=========== + +Serializing and deserializing with cbor2 is pretty straightforward:: + + from cbor2 import dumps, loads + + # Serialize an object as a bytestring + data = dumps(['hello', 'world']) + + # Deserialize a bytestring + obj = loads(data) + + # Efficiently deserialize from a file + with open('input.cbor', 'rb') as fp: + obj = load(fp) + + # Efficiently serialize an object to a file + with open('output.cbor', 'wb') as fp: + dump(obj, fp) + +Some data types, however, require extra considerations, as detailed below. + +String/bytes handling on Python 2 +--------------------------------- + +The ``str`` type is encoded as binary on Python 2. If you want to encode strings as text on +Python 2, use unicode strings instead. 
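A minimal sketch of the text-versus-bytes distinction described above; the hex vectors are the same ones exercised by the test suite added in this patch::

    from binascii import unhexlify
    from cbor2 import dumps, loads

    # Text strings use CBOR major type 3; byte strings use major type 2.
    assert dumps(u'a') == unhexlify('6161')                       # text string "a"
    assert dumps(b'\x01\x02\x03\x04') == unhexlify('4401020304')  # byte string
    assert loads(unhexlify('6161')) == u'a'

    # On Python 2 a bare str literal is bytes and takes the byte-string path,
    # so use u'...' literals when text output is wanted.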
+ +Date/time handling +------------------ + +The CBOR specification does not support naïve datetimes (that is, datetimes where ``tzinfo`` is +missing). When the encoder encounters such a datetime, it needs to know which timezone it belongs +to. To this end, you can specify a default timezone by passing a :class:`~datetime.tzinfo` instance +to :func:`~cbor2.encoder.dump`/:func:`~cbor2.encoder.dumps` call as the ``timezone`` argument. +Decoded datetimes are always timezone aware. + +By default, datetimes are serialized in a manner that retains their timezone offsets. You can +optimize the data stream size by passing ``datetime_as_timestamp=False`` to +:func:`~cbor2.encoder.dump`/:func:`~cbor2.encoder.dumps`, but this causes the timezone offset +information to be lost. + +Cyclic (recursive) data structures +---------------------------------- + +If the encoder encounters a shareable object (ie. list or dict) that it has been before, it will +by default raise :exc:`~cbor2.encoder.CBOREncodeError` indicating that a cyclic reference has been +detected and value sharing was not enabled. CBOR has, however, an extension specification that +allows the encoder to reference a previously encoded value without processing it again. This makes +it possible to serialize such cyclic references, but value sharing has to be enabled by passing +``value_sharing=True`` to :func:`~cbor2.encoder.dump`/:func:`~cbor2.encoder.dumps`. + +.. warning:: Support for value sharing is rare in other CBOR implementations, so think carefully + whether you want to enable it. It also causes some line overhead, as all potentially shareable + values must be tagged as such. + +Tag support +----------- + +In addition to all standard CBOR tags, this library supports many extended tags: + +=== ======================================== ==================================================== +Tag Semantics Python type(s) +=== ======================================== ==================================================== +0 Standard date/time string datetime.date / datetime.datetime +1 Epoch-based date/time datetime.date / datetime.datetime +2 Positive bignum int / long +3 Negative bignum int / long +4 Decimal fraction decimal.Decimal +5 Bigfloat decimal.Decimal +28 Mark shared value N/A +29 Reference shared value N/A +30 Rational number fractions.Fraction +35 Regular expression ``_sre.SRE_Pattern`` (result of ``re.compile(...)``) +36 MIME message email.message.Message +37 Binary UUID uuid.UUID +=== ======================================== ==================================================== + +Arbitary tags can be represented with the :class:`~cbor2.types.CBORTag` class. diff --git a/third_party/python/cbor2/docs/versionhistory.rst b/third_party/python/cbor2/docs/versionhistory.rst new file mode 100644 index 000000000000..246e43bb832a --- /dev/null +++ b/third_party/python/cbor2/docs/versionhistory.rst @@ -0,0 +1,73 @@ +Version history +=============== + +This library adheres to `Semantic Versioning `_. 
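Picking up the date/time notes from ``usage.rst`` above: a minimal sketch of supplying a default timezone for a naïve datetime, assuming ``dumps()`` accepts the ``timezone`` argument exactly as those notes describe; the ``timezone`` helper is the one the bundled tests import from ``cbor2.compat``::

    from datetime import datetime
    from cbor2 import dumps, loads
    from cbor2.compat import timezone

    naive = datetime(2013, 3, 21, 20, 4, 0)      # no tzinfo attached (a naive datetime)
    data = dumps(naive, timezone=timezone.utc)   # 'timezone' supplies the default zone

    decoded = loads(data)
    assert decoded.tzinfo is not None            # decoded datetimes are always timezone aware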
+ +**4.0.1.** (2017-08-21) + +- Fixed silent truncation of decoded data if there are not enough bytes in the stream for an exact + read (``CBORDecodeError`` is now raised instead) + +**4.0.0** (2017-04-24) + +- **BACKWARD INCOMPATIBLE** Value sharing has been disabled by default, for better compatibility + with other implementations and better performance (since it is rarely needed) +- **BACKWARD INCOMPATIBLE** Replaced the ``semantic_decoders`` decoder option with the ``tag_hook`` + option +- **BACKWARD INCOMPATIBLE** Replaced the ``encoders`` encoder option with the ``default`` option +- **BACKWARD INCOMPATIBLE** Factored out the file object argument (``fp``) from all callbacks +- **BACKWARD INCOMPATIBLE** The encoder no longer supports every imaginable type implementing the + ``Sequence`` or ``Map`` interface, as they turned out to be too broad +- Added the ``object_hook`` option for decoding dicts into complex objects + (intended for situations where JSON compatibility is required and semantic tags cannot be used) +- Added encoding and decoding of simple values (``CBORSimpleValue``) + (contributed by Jerry Lundström) +- Replaced the decoder for bignums with a simpler and faster version (contributed by orent) +- Made all relevant classes and functions available directly in the ``cbor2`` namespace +- Added proper documentation + +**3.0.4** (2016-09-24) + +- Fixed TypeError when trying to encode extension types (regression introduced in 3.0.3) + +**3.0.3** (2016-09-23) + +- No changes, just re-releasing due to git tagging screw-up + +**3.0.2** (2016-09-23) + +- Fixed decoding failure for datetimes with microseconds (tag 0) + +**3.0.1** (2016-08-08) + +- Fixed error in the cyclic structure detection code that could mistake one container for + another, sometimes causing a bogus error about cyclic data structures where there was none + +**3.0.0** (2016-07-03) + +- **BACKWARD INCOMPATIBLE** Encoder callbacks now receive three arguments: the encoder instance, + the value to encode and a file-like object. The callback must must now either write directly to + the file-like object or call another encoder callback instead of returning an iterable. +- **BACKWARD INCOMPATIBLE** Semantic decoder callbacks now receive four arguments: the decoder + instance, the primitive value, a file-like object and the shareable index for the decoded value. + Decoders that support value sharing must now set the raw value at the given index in + ``decoder.shareables``. 
+- **BACKWARD INCOMPATIBLE** Removed support for iterative encoding (``CBOREncoder.encode()`` is no + longer a generator function and always returns ``None``) +- Significantly improved performance (encoder ~30 % faster, decoder ~60 % faster) +- Fixed serialization round-trip for ``undefined`` (simple type #23) +- Added proper support for value sharing in callbacks + +**2.0.0** (2016-06-11) + +- **BACKWARD INCOMPATIBLE** Deserialize unknown tags as ``CBORTag`` objects so as not to lose + information +- Fixed error messages coming from nested structures + +**1.1.0** (2016-06-10) + +- Fixed deserialization of cyclic structures + +**1.0.0** (2016-06-08) + +- Initial release diff --git a/third_party/python/cbor2/setup.cfg b/third_party/python/cbor2/setup.cfg new file mode 100644 index 000000000000..fd6c5cb2cf28 --- /dev/null +++ b/third_party/python/cbor2/setup.cfg @@ -0,0 +1,21 @@ +[tool:pytest] +addopts = -rsx --cov --tb=short +testpaths = tests + +[coverage:run] +source = cbor2 + +[coverage:report] +show_missing = true + +[flake8] +max-line-length = 99 +exclude = .tox,build,docs + +[bdist_wheel] +universal = 1 + +[egg_info] +tag_build = +tag_date = 0 + diff --git a/third_party/python/cbor2/setup.py b/third_party/python/cbor2/setup.py new file mode 100644 index 000000000000..530fd8e1d521 --- /dev/null +++ b/third_party/python/cbor2/setup.py @@ -0,0 +1,43 @@ +# coding: utf-8 +import os.path + +from setuptools import setup, find_packages + + +here = os.path.dirname(__file__) +readme_path = os.path.join(here, 'README.rst') +readme = open(readme_path).read() + +setup( + name='cbor2', + use_scm_version={ + 'version_scheme': 'post-release', + 'local_scheme': 'dirty-tag' + }, + description='Pure Python CBOR (de)serializer with extensive tag support', + long_description=readme, + author=u'Alex Grönholm', + author_email='alex.gronholm@nextday.fi', + url='https://github.com/agronholm/cbor2', + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6' + ], + keywords='serialization cbor', + license='MIT', + packages=find_packages(exclude=['tests']), + setup_requires=[ + 'setuptools_scm' + ], + extras_require={ + 'testing': ['pytest', 'pytest-cov'] + } +) diff --git a/third_party/python/cbor2/tests/test_decoder.py b/third_party/python/cbor2/tests/test_decoder.py new file mode 100644 index 000000000000..4d281ad3ab32 --- /dev/null +++ b/third_party/python/cbor2/tests/test_decoder.py @@ -0,0 +1,327 @@ +from __future__ import division + +import math +import re +from binascii import unhexlify +from datetime import datetime, timedelta +from decimal import Decimal +from email.message import Message +from fractions import Fraction +from io import BytesIO +from uuid import UUID + +import pytest + +from cbor2.compat import timezone +from cbor2.decoder import loads, CBORDecodeError, load, CBORDecoder +from cbor2.types import CBORTag, undefined, CBORSimpleValue + + +@pytest.mark.parametrize('payload, expected', [ + ('00', 0), + ('01', 1), + ('0a', 10), + ('17', 23), + ('1818', 24), + ('1819', 25), + ('1864', 100), + ('1903e8', 1000), + ('1a000f4240', 1000000), + ('1b000000e8d4a51000', 1000000000000), + ('1bffffffffffffffff', 
18446744073709551615), + ('c249010000000000000000', 18446744073709551616), + ('3bffffffffffffffff', -18446744073709551616), + ('c349010000000000000000', -18446744073709551617), + ('20', -1), + ('29', -10), + ('3863', -100), + ('3903e7', -1000) +]) +def test_integer(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +def test_invalid_integer_subtype(): + exc = pytest.raises(CBORDecodeError, loads, b'\x1c') + assert str(exc.value).endswith('unknown unsigned integer subtype 0x1c') + + +@pytest.mark.parametrize('payload, expected', [ + ('f90000', 0.0), + ('f98000', -0.0), + ('f93c00', 1.0), + ('fb3ff199999999999a', 1.1), + ('f93e00', 1.5), + ('f97bff', 65504.0), + ('fa47c35000', 100000.0), + ('fa7f7fffff', 3.4028234663852886e+38), + ('fb7e37e43c8800759c', 1.0e+300), + ('f90001', 5.960464477539063e-8), + ('f90400', 0.00006103515625), + ('f9c400', -4.0), + ('fbc010666666666666', -4.1), + ('f97c00', float('inf')), + ('f9fc00', float('-inf')), + ('fa7f800000', float('inf')), + ('faff800000', float('-inf')), + ('fb7ff0000000000000', float('inf')), + ('fbfff0000000000000', float('-inf')) +]) +def test_float(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload', ['f97e00', 'fa7fc00000', 'fb7ff8000000000000']) +def test_float_nan(payload): + decoded = loads(unhexlify(payload)) + assert math.isnan(decoded) + + +@pytest.mark.parametrize('payload, expected', [ + ('f4', False), + ('f5', True), + ('f6', None), + ('f7', undefined) +]) +def test_special(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded is expected + + +@pytest.mark.parametrize('payload, expected', [ + ('40', b''), + ('4401020304', b'\x01\x02\x03\x04'), +]) +def test_binary(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload, expected', [ + ('60', u''), + ('6161', u'a'), + ('6449455446', u'IETF'), + ('62225c', u'\"\\'), + ('62c3bc', u'\u00fc'), + ('63e6b0b4', u'\u6c34') +]) +def test_string(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload, expected', [ + ('80', []), + ('83010203', [1, 2, 3]), + ('8301820203820405', [1, [2, 3], [4, 5]]), + ('98190102030405060708090a0b0c0d0e0f101112131415161718181819', list(range(1, 26))) +]) +def test_array(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload, expected', [ + ('a0', {}), + ('a201020304', {1: 2, 3: 4}) +]) +def test_map(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload, expected', [ + ('a26161016162820203', {'a': 1, 'b': [2, 3]}), + ('826161a161626163', ['a', {'b': 'c'}]), + ('a56161614161626142616361436164614461656145', + {'a': 'A', 'b': 'B', 'c': 'C', 'd': 'D', 'e': 'E'}) +]) +def test_mixed_array_map(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload, expected', [ + ('5f42010243030405ff', b'\x01\x02\x03\x04\x05'), + ('7f657374726561646d696e67ff', 'streaming'), + ('9fff', []), + ('9f018202039f0405ffff', [1, [2, 3], [4, 5]]), + ('9f01820203820405ff', [1, [2, 3], [4, 5]]), + ('83018202039f0405ff', [1, [2, 3], [4, 5]]), + ('83019f0203ff820405', [1, [2, 3], [4, 5]]), + ('9f0102030405060708090a0b0c0d0e0f101112131415161718181819ff', list(range(1, 26))), + ('bf61610161629f0203ffff', {'a': 1, 'b': 
[2, 3]}), + ('826161bf61626163ff', ['a', {'b': 'c'}]), + ('bf6346756ef563416d7421ff', {'Fun': True, 'Amt': -2}), +]) +def test_streaming(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +@pytest.mark.parametrize('payload, expected', [ + ('e0', 0), + ('e2', 2), + ('f3', 19), + ('f820', 32), + ('e0', CBORSimpleValue(0)), + ('e2', CBORSimpleValue(2)), + ('f3', CBORSimpleValue(19)), + ('f820', CBORSimpleValue(32)) +]) +def test_simple_value(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +# +# Tests for extension tags +# + + +@pytest.mark.parametrize('payload, expected', [ + ('c074323031332d30332d32315432303a30343a30305a', + datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc)), + ('c0781b323031332d30332d32315432303a30343a30302e3338303834315a', + datetime(2013, 3, 21, 20, 4, 0, 380841, tzinfo=timezone.utc)), + ('c07819323031332d30332d32315432323a30343a30302b30323a3030', + datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2)))), + ('c11a514b67b0', datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc)), + ('c11a514b67b0', datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2)))) +], ids=['datetime/utc', 'datetime+micro/utc', 'datetime/eet', 'timestamp/utc', 'timestamp/eet']) +def test_datetime(payload, expected): + decoded = loads(unhexlify(payload)) + assert decoded == expected + + +def test_bad_datetime(): + exc = pytest.raises(CBORDecodeError, loads, unhexlify('c06b303030302d3132332d3031')) + assert str(exc.value).endswith('invalid datetime string: 0000-123-01') + + +def test_fraction(): + decoded = loads(unhexlify('c48221196ab3')) + assert decoded == Decimal('273.15') + + +def test_bigfloat(): + decoded = loads(unhexlify('c5822003')) + assert decoded == Decimal('1.5') + + +def test_rational(): + decoded = loads(unhexlify('d81e820205')) + assert decoded == Fraction(2, 5) + + +def test_regex(): + decoded = loads(unhexlify('d8236d68656c6c6f2028776f726c6429')) + expr = re.compile(u'hello (world)') + assert decoded == expr + + +def test_mime(): + decoded = loads(unhexlify( + 'd824787b436f6e74656e742d547970653a20746578742f706c61696e3b20636861727365743d2269736f2d38' + '3835392d3135220a4d494d452d56657273696f6e3a20312e300a436f6e74656e742d5472616e736665722d45' + '6e636f64696e673a2071756f7465642d7072696e7461626c650a0a48656c6c6f203d413475726f')) + assert isinstance(decoded, Message) + assert decoded.get_payload() == 'Hello =A4uro' + + +def test_uuid(): + decoded = loads(unhexlify('d825505eaffac8b51e480581277fdcc7842faf')) + assert decoded == UUID(hex='5eaffac8b51e480581277fdcc7842faf') + + +def test_bad_shared_reference(): + exc = pytest.raises(CBORDecodeError, loads, unhexlify('d81d05')) + assert str(exc.value).endswith('shared reference 5 not found') + + +def test_uninitialized_shared_reference(): + fp = BytesIO(unhexlify('d81d00')) + decoder = CBORDecoder(fp) + decoder._shareables.append(None) + exc = pytest.raises(CBORDecodeError, decoder.decode) + assert str(exc.value).endswith('shared value 0 has not been initialized') + + +def test_cyclic_array(): + decoded = loads(unhexlify('d81c81d81d00')) + assert decoded == [decoded] + + +def test_cyclic_map(): + decoded = loads(unhexlify('d81ca100d81d00')) + assert decoded == {0: decoded} + + +def test_unhandled_tag(): + """ + Test that a tag is simply ignored and its associated value returned if there is no special + handling available for it. 
+ + """ + decoded = loads(unhexlify('d917706548656c6c6f')) + assert decoded == CBORTag(6000, u'Hello') + + +def test_premature_end_of_stream(): + """ + Test that the decoder detects a situation where read() returned fewer than expected bytes. + + """ + exc = pytest.raises(CBORDecodeError, loads, unhexlify('437879')) + exc.match('premature end of stream \(expected to read 3 bytes, got 2 instead\)') + + +def test_tag_hook(): + def reverse(decoder, tag, fp, shareable_index=None): + return tag.value[::-1] + + decoded = loads(unhexlify('d917706548656c6c6f'), tag_hook=reverse) + assert decoded == u'olleH' + + +def test_tag_hook_cyclic(): + class DummyType(object): + def __init__(self, value): + self.value = value + + def unmarshal_dummy(decoder, tag, shareable_index=None): + instance = DummyType.__new__(DummyType) + decoder.set_shareable(shareable_index, instance) + instance.value = decoder.decode_from_bytes(tag.value) + return instance + + decoded = loads(unhexlify('D81CD90BB849D81CD90BB843D81D00'), tag_hook=unmarshal_dummy) + assert isinstance(decoded, DummyType) + assert decoded.value.value is decoded + + +def test_object_hook(): + class DummyType(object): + def __init__(self, state): + self.state = state + + payload = unhexlify('A2616103616205') + decoded = loads(payload, object_hook=lambda decoder, value: DummyType(value)) + assert isinstance(decoded, DummyType) + assert decoded.state == {'a': 3, 'b': 5} + + +def test_error_major_type(): + exc = pytest.raises(CBORDecodeError, loads, b'') + assert str(exc.value).startswith('error reading major type at index 0: ') + + +def test_load_from_file(tmpdir): + path = tmpdir.join('testdata.cbor') + path.write_binary(b'\x82\x01\x0a') + with path.open('rb') as fp: + obj = load(fp) + + assert obj == [1, 10] diff --git a/third_party/python/cbor2/tests/test_encoder.py b/third_party/python/cbor2/tests/test_encoder.py new file mode 100644 index 000000000000..b2537519ba85 --- /dev/null +++ b/third_party/python/cbor2/tests/test_encoder.py @@ -0,0 +1,260 @@ +import re +from binascii import unhexlify +from datetime import datetime, timedelta, date +from decimal import Decimal +from email.mime.text import MIMEText +from fractions import Fraction +from uuid import UUID + +import pytest + +from cbor2.compat import timezone +from cbor2.encoder import dumps, CBOREncodeError, dump, shareable_encoder +from cbor2.types import CBORTag, undefined, CBORSimpleValue + + +@pytest.mark.parametrize('value, expected', [ + (0, '00'), + (1, '01'), + (10, '0a'), + (23, '17'), + (24, '1818'), + (100, '1864'), + (1000, '1903e8'), + (1000000, '1a000f4240'), + (1000000000000, '1b000000e8d4a51000'), + (18446744073709551615, '1bffffffffffffffff'), + (18446744073709551616, 'c249010000000000000000'), + (-18446744073709551616, '3bffffffffffffffff'), + (-18446744073709551617, 'c349010000000000000000'), + (-1, '20'), + (-10, '29'), + (-100, '3863'), + (-1000, '3903e7') +]) +def test_integer(value, expected): + expected = unhexlify(expected) + assert dumps(value) == expected + + +@pytest.mark.parametrize('value, expected', [ + (1.1, 'fb3ff199999999999a'), + (1.0e+300, 'fb7e37e43c8800759c'), + (-4.1, 'fbc010666666666666'), + (float('inf'), 'f97c00'), + (float('nan'), 'f97e00'), + (float('-inf'), 'f9fc00') +]) +def test_float(value, expected): + expected = unhexlify(expected) + assert dumps(value) == expected + + +@pytest.mark.parametrize('value, expected', [ + (b'', '40'), + (b'\x01\x02\x03\x04', '4401020304'), +]) +def test_bytestring(value, expected): + expected = unhexlify(expected) + 
assert dumps(value) == expected + + +def test_bytearray(): + expected = unhexlify('4401020304') + assert dumps(bytearray(b'\x01\x02\x03\x04')) == expected + + +@pytest.mark.parametrize('value, expected', [ + (u'', '60'), + (u'a', '6161'), + (u'IETF', '6449455446'), + (u'"\\', '62225c'), + (u'\u00fc', '62c3bc'), + (u'\u6c34', '63e6b0b4') +]) +def test_string(value, expected): + expected = unhexlify(expected) + assert dumps(value) == expected + + +@pytest.mark.parametrize('value, expected', [ + (False, 'f4'), + (True, 'f5'), + (None, 'f6'), + (undefined, 'f7') +], ids=['false', 'true', 'null', 'undefined']) +def test_special(value, expected): + expected = unhexlify(expected) + assert dumps(value) == expected + + +@pytest.mark.parametrize('value, expected', [ + (CBORSimpleValue(0), 'e0'), + (CBORSimpleValue(2), 'e2'), + (CBORSimpleValue(19), 'f3'), + (CBORSimpleValue(32), 'f820') +]) +def test_simple_value(value, expected): + expected = unhexlify(expected) + assert dumps(value) == expected + + +# +# Tests for extension tags +# + +@pytest.mark.parametrize('value, as_timestamp, expected', [ + (datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc), False, + 'c074323031332d30332d32315432303a30343a30305a'), + (datetime(2013, 3, 21, 20, 4, 0, 380841, tzinfo=timezone.utc), False, + 'c0781b323031332d30332d32315432303a30343a30302e3338303834315a'), + (datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))), False, + 'c07819323031332d30332d32315432323a30343a30302b30323a3030'), + (datetime(2013, 3, 21, 20, 4, 0), False, 'c074323031332d30332d32315432303a30343a30305a'), + (datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc), True, 'c11a514b67b0'), + (datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))), True, 'c11a514b67b0') +], ids=['datetime/utc', 'datetime+micro/utc', 'datetime/eet', 'naive', 'timestamp/utc', + 'timestamp/eet']) +def test_datetime(value, as_timestamp, expected): + expected = unhexlify(expected) + assert dumps(value, datetime_as_timestamp=as_timestamp, timezone=timezone.utc) == expected + + +def test_date(): + expected = unhexlify('c074323031332d30332d32315430303a30303a30305a') + assert dumps(date(2013, 3, 21), timezone=timezone.utc) == expected + + +def test_naive_datetime(): + """Test that naive datetimes are gracefully rejected when no timezone has been set.""" + exc = pytest.raises(CBOREncodeError, dumps, datetime(2013, 3, 21)) + exc.match('naive datetime encountered and no default timezone has been set') + + +@pytest.mark.parametrize('value, expected', [ + (Decimal('14.123'), 'c4822219372b'), + (Decimal('NaN'), 'f97e00'), + (Decimal('Infinity'), 'f97c00'), + (Decimal('-Infinity'), 'f9fc00') +], ids=['normal', 'nan', 'inf', 'neginf']) +def test_decimal(value, expected): + expected = unhexlify(expected) + assert dumps(value) == expected + + +def test_rational(): + expected = unhexlify('d81e820205') + assert dumps(Fraction(2, 5)) == expected + + +def test_regex(): + expected = unhexlify('d8236d68656c6c6f2028776f726c6429') + assert dumps(re.compile(u'hello (world)')) == expected + + +def test_mime(): + expected = unhexlify( + 'd824787b436f6e74656e742d547970653a20746578742f706c61696e3b20636861727365743d2269736f2d38' + '3835392d3135220a4d494d452d56657273696f6e3a20312e300a436f6e74656e742d5472616e736665722d456' + 'e636f64696e673a2071756f7465642d7072696e7461626c650a0a48656c6c6f203d413475726f') + message = MIMEText(u'Hello \u20acuro', 'plain', 'iso-8859-15') + assert dumps(message) == expected + + +def test_uuid(): + expected = 
unhexlify('d825505eaffac8b51e480581277fdcc7842faf') + assert dumps(UUID(hex='5eaffac8b51e480581277fdcc7842faf')) == expected + + +def test_custom_tag(): + expected = unhexlify('d917706548656c6c6f') + assert dumps(CBORTag(6000, u'Hello')) == expected + + +def test_cyclic_array(): + """Test that an array that contains itself can be serialized with value sharing enabled.""" + expected = unhexlify('d81c81d81c81d81d00') + a = [[]] + a[0].append(a) + assert dumps(a, value_sharing=True) == expected + + +def test_cyclic_array_nosharing(): + """Test that serializing a cyclic structure w/o value sharing will blow up gracefully.""" + a = [] + a.append(a) + exc = pytest.raises(CBOREncodeError, dumps, a) + exc.match('cyclic data structure detected but value sharing is disabled') + + +def test_cyclic_map(): + """Test that a dict that contains itself can be serialized with value sharing enabled.""" + expected = unhexlify('d81ca100d81d00') + a = {} + a[0] = a + assert dumps(a, value_sharing=True) == expected + + +def test_cyclic_map_nosharing(): + """Test that serializing a cyclic structure w/o value sharing will fail gracefully.""" + a = {} + a[0] = a + exc = pytest.raises(CBOREncodeError, dumps, a) + exc.match('cyclic data structure detected but value sharing is disabled') + + +@pytest.mark.parametrize('value_sharing, expected', [ + (False, '828080'), + (True, 'd81c82d81c80d81d01') +], ids=['nosharing', 'sharing']) +def test_not_cyclic_same_object(value_sharing, expected): + """Test that the same shareable object can be included twice if not in a cyclic structure.""" + expected = unhexlify(expected) + a = [] + b = [a, a] + assert dumps(b, value_sharing=value_sharing) == expected + + +def test_unsupported_type(): + exc = pytest.raises(CBOREncodeError, dumps, lambda: None) + exc.match('cannot serialize type function') + + +def test_default(): + class DummyType(object): + def __init__(self, state): + self.state = state + + def default_encoder(encoder, value): + encoder.encode(value.state) + + expected = unhexlify('820305') + obj = DummyType([3, 5]) + serialized = dumps(obj, default=default_encoder) + assert serialized == expected + + +def test_default_cyclic(): + class DummyType(object): + def __init__(self, value=None): + self.value = value + + @shareable_encoder + def default_encoder(encoder, value): + state = encoder.encode_to_bytes(value.value) + encoder.encode(CBORTag(3000, state)) + + expected = unhexlify('D81CD90BB849D81CD90BB843D81D00') + obj = DummyType() + obj2 = DummyType(obj) + obj.value = obj2 + serialized = dumps(obj, value_sharing=True, default=default_encoder) + assert serialized == expected + + +def test_dump_to_file(tmpdir): + path = tmpdir.join('testdata.cbor') + with path.open('wb') as fp: + dump([1, 10], fp) + + assert path.read_binary() == b'\x82\x01\x0a' diff --git a/third_party/python/cbor2/tests/test_types.py b/third_party/python/cbor2/tests/test_types.py new file mode 100644 index 000000000000..e5eea5fbdcab --- /dev/null +++ b/third_party/python/cbor2/tests/test_types.py @@ -0,0 +1,36 @@ +import pytest + +from cbor2.types import CBORTag, CBORSimpleValue + + +def test_tag_repr(): + assert repr(CBORTag(600, 'blah')) == "CBORTag(600, 'blah')" + + +def test_tag_equals(): + tag1 = CBORTag(500, ['foo']) + tag2 = CBORTag(500, ['foo']) + tag3 = CBORTag(500, ['bar']) + assert tag1 == tag2 + assert not tag1 == tag3 + assert not tag1 == 500 + + +def test_simple_value_repr(): + assert repr(CBORSimpleValue(1)) == "CBORSimpleValue(1)" + + +def test_simple_value_equals(): + tag1 = 
CBORSimpleValue(1) + tag2 = CBORSimpleValue(1) + tag3 = CBORSimpleValue(21) + assert tag1 == tag2 + assert tag1 == 1 + assert not tag1 == tag3 + assert not tag1 == 21 + assert not tag2 == "21" + + +def test_simple_value_too_big(): + exc = pytest.raises(TypeError, CBORSimpleValue, 256) + assert str(exc.value) == 'simple value too big' diff --git a/third_party/python/cbor2/tox.ini b/third_party/python/cbor2/tox.ini new file mode 100644 index 000000000000..8f272bf698f8 --- /dev/null +++ b/third_party/python/cbor2/tox.ini @@ -0,0 +1,20 @@ +[tox] +envlist = py27, py33, py34, py35, py36, pypy, flake8 +skip_missing_interpreters = true + +[tox:travis] +2.7 = py27 +3.3 = py33 +3.4 = py34 +3.5 = py35 +3.6 = py36, flake8 +pypy = pypy + +[testenv] +commands = python -m pytest {posargs} +extras = testing + +[testenv:flake8] +deps = flake8 +commands = flake8 cbor2 tests +skip_install = true diff --git a/third_party/python/cram/cram-0.7.data/scripts/cram b/third_party/python/cram/cram-0.7.data/scripts/cram deleted file mode 100755 index 806c69978208..000000000000 --- a/third_party/python/cram/cram-0.7.data/scripts/cram +++ /dev/null @@ -1,9 +0,0 @@ -#!python -import sys - -import cram - -try: - sys.exit(cram.main(sys.argv[1:])) -except KeyboardInterrupt: - pass diff --git a/third_party/python/cram/cram-0.7.dist-info/DESCRIPTION.rst b/third_party/python/cram/cram-0.7.dist-info/DESCRIPTION.rst deleted file mode 100644 index 0a6577392cb8..000000000000 --- a/third_party/python/cram/cram-0.7.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,227 +0,0 @@ -====================== - Cram: It's test time -====================== - -Cram is a functional testing framework for command line applications. -Cram tests look like snippets of interactive shell sessions. Cram runs -each command and compares the command output in the test with the -command's actual output. - -Here's a snippet from `Cram's own test suite`_:: - - The $PYTHON environment variable should be set when running this test - from Python. - - $ [ -n "$PYTHON" ] || PYTHON="`which python`" - $ [ -n "$PYTHONPATH" ] || PYTHONPATH="$TESTDIR/.." && export PYTHONPATH - $ if [ -n "$COVERAGE" ]; then - > coverage erase - > alias cram="`which coverage` run --branch -a $TESTDIR/../scripts/cram" - > else - > alias cram="$PYTHON $TESTDIR/../scripts/cram" - > fi - $ command -v md5 > /dev/null || alias md5=md5sum - - Usage: - - $ cram -h - [Uu]sage: cram \[OPTIONS\] TESTS\.\.\. (re) - - [Oo]ptions: (re) - -h, --help show this help message and exit - -V, --version show version information and exit - -q, --quiet don't print diffs - -v, --verbose show filenames and test status - -i, --interactive interactively merge changed test output - -d, --debug write script output directly to the terminal - -y, --yes answer yes to all questions - -n, --no answer no to all questions - -E, --preserve-env don't reset common environment variables - --keep-tmpdir keep temporary directories - --shell=PATH shell to use for running tests (default: /bin/sh) - --shell-opts=OPTS arguments to invoke shell with - --indent=NUM number of spaces to use for indentation (default: 2) - --xunit-file=PATH path to write xUnit XML output - -The format in a nutshell: - -* Cram tests use the ``.t`` file extension. - -* Lines beginning with two spaces, a dollar sign, and a space are run - in the shell. - -* Lines beginning with two spaces, a greater than sign, and a space - allow multi-line commands. - -* All other lines beginning with two spaces are considered command - output. 
- -* Output lines ending with a space and the keyword ``(re)`` are - matched as `Perl-compatible regular expressions`_. - -* Lines ending with a space and the keyword ``(glob)`` are matched - with a glob-like syntax. The only special characters supported are - ``*`` and ``?``. Both characters can be escaped using ``\``, and the - backslash can be escaped itself. - -* Output lines ending with either of the above keywords are always - first matched literally with actual command output. - -* Lines ending with a space and the keyword ``(no-eol)`` will match - actual output that doesn't end in a newline. - -* Actual output lines containing unprintable characters are escaped - and suffixed with a space and the keyword ``(esc)``. Lines matching - unprintable output must also contain the keyword. - -* Anything else is a comment. - -.. _Cram's own test suite: https://bitbucket.org/brodie/cram/src/default/tests/cram.t -.. _Perl-compatible regular expressions: https://en.wikipedia.org/wiki/Perl_Compatible_Regular_Expressions - - -Download --------- - -* `cram-0.7.tar.gz`_ (32 KB, requires Python 2.4-2.7 or Python 3.1 or newer) - -.. _cram-0.7.tar.gz: https://bitheap.org/cram/cram-0.7.tar.gz - - -Installation ------------- - -Install Cram using make:: - - $ wget https://bitheap.org/cram/cram-0.7.tar.gz - $ tar zxvf cram-0.7.tar.gz - $ cd cram-0.7 - $ make install - - -Usage ------ - -Cram will print a dot for each passing test. If a test fails, a -`unified context diff`_ is printed showing the test's expected output -and the actual output. Skipped tests (empty tests and tests that exit -with return code ``80``) are marked with ``s`` instead of a dot. - -For example, if we run Cram on `its own example tests`_:: - - .s.! - --- examples/fail.t - +++ examples/fail.t.err - @@ -3,21 +3,22 @@ - $ echo 1 - 1 - $ echo 1 - - 2 - + 1 - $ echo 1 - 1 - - Invalid regex: - - $ echo 1 - - +++ (re) - + 1 - - Offset regular expression: - - $ printf 'foo\nbar\nbaz\n\n1\nA\n@\n' - foo - + bar - baz - - \d (re) - [A-Z] (re) - - # - + @ - s. - # Ran 6 tests, 2 skipped, 1 failed. - -Cram will also write the test with its actual output to -``examples/fail.t.err``, allowing you to use other diff tools. This -file is automatically removed the next time the test passes. - -When you're first writing a test, you might just write the commands -and run the test to see what happens. If you run Cram with ``-i`` or -``--interactive``, you'll be prompted to merge the actual output back -into the test. This makes it easy to quickly prototype new tests. - -You can specify a default set of options by creating a ``.cramrc`` -file. For example:: - - [cram] - verbose = True - indent = 4 - -Is the same as invoking Cram with ``--verbose`` and ``--indent=4``. - -To change what configuration file Cram loads, you can set the -``CRAMRC`` environment variable. You can also specify command line -options in the ``CRAM`` environment variable. - -Note that the following environment variables are reset before tests -are run: - -* ``TMPDIR``, ``TEMP``, and ``TMP`` are set to the test runner's - ``tmp`` directory. - -* ``LANG``, ``LC_ALL``, and ``LANGUAGE`` are set to ``C``. - -* ``TZ`` is set to ``GMT``. - -* ``COLUMNS`` is set to ``80``. (Note: When using ``--shell=zsh``, - this cannot be reset. It will reflect the actual terminal's width.) - -* ``CDPATH`` and ``GREP_OPTIONS`` are set to an empty string. - -Cram also provides the following environment variables to tests: - -* ``CRAMTMP``, set to the test runner's temporary directory. 
- -* ``TESTDIR``, set to the directory containing the test file. - -* ``TESTFILE``, set to the basename of the current test file. - -* ``TESTSHELL``, set to the value specified by ``--shell``. - -Also note that care should be taken with commands that close the test -shell's ``stdin``. For example, if you're trying to invoke ``ssh`` in -a test, try adding the ``-n`` option to prevent it from closing -``stdin``. Similarly, if you invoke a daemon process that inherits -``stdout`` and fails to close it, it may cause Cram to hang while -waiting for the test shell's ``stdout`` to be fully closed. - -.. _unified context diff: https://en.wikipedia.org/wiki/Diff#Unified_format -.. _its own example tests: https://bitbucket.org/brodie/cram/src/default/examples/ - - -Development ------------ - -Download the official development repository using Mercurial_:: - - hg clone https://bitbucket.org/brodie/cram - -Or Git_:: - - git clone https://github.com/brodie/cram.git - -Test Cram using Cram:: - - pip install -r requirements.txt - make test - -Visit Bitbucket_ or GitHub_ if you'd like to fork the project, watch -for new changes, or report issues. - -.. _Mercurial: http://mercurial.selenic.com/ -.. _Git: http://git-scm.com/ -.. _coverage.py: http://nedbatchelder.com/code/coverage/ -.. _Bitbucket: https://bitbucket.org/brodie/cram -.. _GitHub: https://github.com/brodie/cram - - diff --git a/third_party/python/cram/cram-0.7.dist-info/METADATA b/third_party/python/cram/cram-0.7.dist-info/METADATA deleted file mode 100644 index 0edb62d168a1..000000000000 --- a/third_party/python/cram/cram-0.7.dist-info/METADATA +++ /dev/null @@ -1,250 +0,0 @@ -Metadata-Version: 2.0 -Name: cram -Version: 0.7 -Summary: A simple testing framework for command line applications -Home-page: https://bitheap.org/cram/ -Author: Brodie Rao -Author-email: brodie@bitheap.org -License: GNU GPLv2 or any later version -Download-URL: https://bitheap.org/cram/cram-0.7.tar.gz -Keywords: automatic functional test framework -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: GNU General Public License (GPL) -Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+) -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Unix Shell -Classifier: Topic :: Software Development :: Testing - -====================== - Cram: It's test time -====================== - -Cram is a functional testing framework for command line applications. -Cram tests look like snippets of interactive shell sessions. Cram runs -each command and compares the command output in the test with the -command's actual output. - -Here's a snippet from `Cram's own test suite`_:: - - The $PYTHON environment variable should be set when running this test - from Python. - - $ [ -n "$PYTHON" ] || PYTHON="`which python`" - $ [ -n "$PYTHONPATH" ] || PYTHONPATH="$TESTDIR/.." && export PYTHONPATH - $ if [ -n "$COVERAGE" ]; then - > coverage erase - > alias cram="`which coverage` run --branch -a $TESTDIR/../scripts/cram" - > else - > alias cram="$PYTHON $TESTDIR/../scripts/cram" - > fi - $ command -v md5 > /dev/null || alias md5=md5sum - - Usage: - - $ cram -h - [Uu]sage: cram \[OPTIONS\] TESTS\.\.\. 
(re) - - [Oo]ptions: (re) - -h, --help show this help message and exit - -V, --version show version information and exit - -q, --quiet don't print diffs - -v, --verbose show filenames and test status - -i, --interactive interactively merge changed test output - -d, --debug write script output directly to the terminal - -y, --yes answer yes to all questions - -n, --no answer no to all questions - -E, --preserve-env don't reset common environment variables - --keep-tmpdir keep temporary directories - --shell=PATH shell to use for running tests (default: /bin/sh) - --shell-opts=OPTS arguments to invoke shell with - --indent=NUM number of spaces to use for indentation (default: 2) - --xunit-file=PATH path to write xUnit XML output - -The format in a nutshell: - -* Cram tests use the ``.t`` file extension. - -* Lines beginning with two spaces, a dollar sign, and a space are run - in the shell. - -* Lines beginning with two spaces, a greater than sign, and a space - allow multi-line commands. - -* All other lines beginning with two spaces are considered command - output. - -* Output lines ending with a space and the keyword ``(re)`` are - matched as `Perl-compatible regular expressions`_. - -* Lines ending with a space and the keyword ``(glob)`` are matched - with a glob-like syntax. The only special characters supported are - ``*`` and ``?``. Both characters can be escaped using ``\``, and the - backslash can be escaped itself. - -* Output lines ending with either of the above keywords are always - first matched literally with actual command output. - -* Lines ending with a space and the keyword ``(no-eol)`` will match - actual output that doesn't end in a newline. - -* Actual output lines containing unprintable characters are escaped - and suffixed with a space and the keyword ``(esc)``. Lines matching - unprintable output must also contain the keyword. - -* Anything else is a comment. - -.. _Cram's own test suite: https://bitbucket.org/brodie/cram/src/default/tests/cram.t -.. _Perl-compatible regular expressions: https://en.wikipedia.org/wiki/Perl_Compatible_Regular_Expressions - - -Download --------- - -* `cram-0.7.tar.gz`_ (32 KB, requires Python 2.4-2.7 or Python 3.1 or newer) - -.. _cram-0.7.tar.gz: https://bitheap.org/cram/cram-0.7.tar.gz - - -Installation ------------- - -Install Cram using make:: - - $ wget https://bitheap.org/cram/cram-0.7.tar.gz - $ tar zxvf cram-0.7.tar.gz - $ cd cram-0.7 - $ make install - - -Usage ------ - -Cram will print a dot for each passing test. If a test fails, a -`unified context diff`_ is printed showing the test's expected output -and the actual output. Skipped tests (empty tests and tests that exit -with return code ``80``) are marked with ``s`` instead of a dot. - -For example, if we run Cram on `its own example tests`_:: - - .s.! - --- examples/fail.t - +++ examples/fail.t.err - @@ -3,21 +3,22 @@ - $ echo 1 - 1 - $ echo 1 - - 2 - + 1 - $ echo 1 - 1 - - Invalid regex: - - $ echo 1 - - +++ (re) - + 1 - - Offset regular expression: - - $ printf 'foo\nbar\nbaz\n\n1\nA\n@\n' - foo - + bar - baz - - \d (re) - [A-Z] (re) - - # - + @ - s. - # Ran 6 tests, 2 skipped, 1 failed. - -Cram will also write the test with its actual output to -``examples/fail.t.err``, allowing you to use other diff tools. This -file is automatically removed the next time the test passes. - -When you're first writing a test, you might just write the commands -and run the test to see what happens. 
If you run Cram with ``-i`` or -``--interactive``, you'll be prompted to merge the actual output back -into the test. This makes it easy to quickly prototype new tests. - -You can specify a default set of options by creating a ``.cramrc`` -file. For example:: - - [cram] - verbose = True - indent = 4 - -Is the same as invoking Cram with ``--verbose`` and ``--indent=4``. - -To change what configuration file Cram loads, you can set the -``CRAMRC`` environment variable. You can also specify command line -options in the ``CRAM`` environment variable. - -Note that the following environment variables are reset before tests -are run: - -* ``TMPDIR``, ``TEMP``, and ``TMP`` are set to the test runner's - ``tmp`` directory. - -* ``LANG``, ``LC_ALL``, and ``LANGUAGE`` are set to ``C``. - -* ``TZ`` is set to ``GMT``. - -* ``COLUMNS`` is set to ``80``. (Note: When using ``--shell=zsh``, - this cannot be reset. It will reflect the actual terminal's width.) - -* ``CDPATH`` and ``GREP_OPTIONS`` are set to an empty string. - -Cram also provides the following environment variables to tests: - -* ``CRAMTMP``, set to the test runner's temporary directory. - -* ``TESTDIR``, set to the directory containing the test file. - -* ``TESTFILE``, set to the basename of the current test file. - -* ``TESTSHELL``, set to the value specified by ``--shell``. - -Also note that care should be taken with commands that close the test -shell's ``stdin``. For example, if you're trying to invoke ``ssh`` in -a test, try adding the ``-n`` option to prevent it from closing -``stdin``. Similarly, if you invoke a daemon process that inherits -``stdout`` and fails to close it, it may cause Cram to hang while -waiting for the test shell's ``stdout`` to be fully closed. - -.. _unified context diff: https://en.wikipedia.org/wiki/Diff#Unified_format -.. _its own example tests: https://bitbucket.org/brodie/cram/src/default/examples/ - - -Development ------------ - -Download the official development repository using Mercurial_:: - - hg clone https://bitbucket.org/brodie/cram - -Or Git_:: - - git clone https://github.com/brodie/cram.git - -Test Cram using Cram:: - - pip install -r requirements.txt - make test - -Visit Bitbucket_ or GitHub_ if you'd like to fork the project, watch -for new changes, or report issues. - -.. _Mercurial: http://mercurial.selenic.com/ -.. _Git: http://git-scm.com/ -.. _coverage.py: http://nedbatchelder.com/code/coverage/ -.. _Bitbucket: https://bitbucket.org/brodie/cram -.. 
_GitHub: https://github.com/brodie/cram - - diff --git a/third_party/python/cram/cram-0.7.dist-info/RECORD b/third_party/python/cram/cram-0.7.dist-info/RECORD deleted file mode 100644 index 8c2ef4ae3fa8..000000000000 --- a/third_party/python/cram/cram-0.7.dist-info/RECORD +++ /dev/null @@ -1,16 +0,0 @@ -cram/__init__.py,sha256=80M3WLqeS6MAACoIZW89KZR4bOmFm7UcpoRPF6S-8jc,172 -cram/__main__.py,sha256=AUlczSWsDtiA6srk4dsmdsz8cZXb1QXMdPkobAR-Ex0,152 -cram/_cli.py,sha256=aIJE2BY0djuOqgtCHe9IVUIl7Vvvk-awsksdmMd1RNc,4345 -cram/_diff.py,sha256=pXLlKb1UgQX17ayJpPQsGoMHW7bKLcACe9KEZlnMkx0,5630 -cram/_encoding.py,sha256=PSPdcjenMvC0wabbPhWPkCxeUcohcQ6o3Rk58AC97Uo,2990 -cram/_main.py,sha256=5gwaBNSyKCq9bwkRLKqNXcsB5Okf0sfxDpousd51CO4,7728 -cram/_process.py,sha256=2JV6sRl_9p3DYu1IYN5_D-isln9vAh5ua6bAxAy8ytA,1805 -cram/_run.py,sha256=X5fOy7TKxMdBcis0JczYZkNUoQdJ5wUqlDCM2sRJDm0,2292 -cram/_test.py,sha256=9QYuf3DRuLs9O1QVP3MfoJlISBRfnC5ONhCL4uXGYG8,7904 -cram/_xunit.py,sha256=KUAUokY3HhkgPYp0IjSl2m7KvztYdbwW7p1aqdaUJgA,6247 -cram-0.7.data/scripts/cram,sha256=S3wCw9Ks2J4dtVftWZ8DU0eNtpb1ekf8Bz73Di3PvUs,112 -cram-0.7.dist-info/DESCRIPTION.rst,sha256=ejwfPio_dRLrZ2PhWnsGbLW6lPyiDTjUAejg5MPG-kg,7080 -cram-0.7.dist-info/METADATA,sha256=ExruW_6HNwqu-mVqvcCSUtund4CHxt5hb3019a3jLeo,8018 -cram-0.7.dist-info/RECORD,, -cram-0.7.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -cram-0.7.dist-info/metadata.json,sha256=cRTULRj1eXU8xWOtqLK8DMhu0vWJELulW_PI8O4ytPU,1063 diff --git a/third_party/python/cram/cram-0.7.dist-info/metadata.json b/third_party/python/cram/cram-0.7.dist-info/metadata.json deleted file mode 100644 index f2156d9e6775..000000000000 --- a/third_party/python/cram/cram-0.7.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License (GPL)", "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Programming Language :: Unix Shell", "Topic :: Software Development :: Testing"], "download_url": "https://bitheap.org/cram/cram-0.7.tar.gz", "extensions": {"python.details": {"contacts": [{"email": "brodie@bitheap.org", "name": "Brodie Rao", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitheap.org/cram/"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["automatic", "functional", "test", "framework"], "license": "GNU GPLv2 or any later version", "metadata_version": "2.0", "name": "cram", "summary": "A simple testing framework for command line applications", "version": "0.7"} \ No newline at end of file diff --git a/third_party/python/funcsigs/CHANGELOG b/third_party/python/funcsigs/CHANGELOG new file mode 100644 index 000000000000..e1366d2668d0 --- /dev/null +++ b/third_party/python/funcsigs/CHANGELOG @@ -0,0 +1,24 @@ +Changelog +--------- + +0.5 +``` + +* Fix binding with self as a kwarg. 
(Robert Collins #14) + +0.4 (2013-12-20) +```````````````` +* Fix unbound methods getting their first parameter curried +* Publish Python wheel packages + +0.3 (2013-05-29) +```````````````` +* Fix annotation formatting of builtin types on Python 2.x + +0.2 (2012-01-07) +```````````````` +* PyPy compatability + +0.1 (2012-01-06) +```````````````` +* Initial release diff --git a/third_party/python/funcsigs/LICENSE b/third_party/python/funcsigs/LICENSE new file mode 100644 index 000000000000..3e563d6fbd4d --- /dev/null +++ b/third_party/python/funcsigs/LICENSE @@ -0,0 +1,13 @@ +Copyright 2013 Aaron Iles + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/third_party/python/funcsigs/MANIFEST.in b/third_party/python/funcsigs/MANIFEST.in new file mode 100644 index 000000000000..f0abb42f04a1 --- /dev/null +++ b/third_party/python/funcsigs/MANIFEST.in @@ -0,0 +1,7 @@ +recursive-include docs * +recursive-include tests *.py +include *.py +include CHANGELOG +include LICENSE +include MANIFEST.in +include README.rst diff --git a/third_party/python/funcsigs/PKG-INFO b/third_party/python/funcsigs/PKG-INFO new file mode 100644 index 000000000000..e262a8d1b955 --- /dev/null +++ b/third_party/python/funcsigs/PKG-INFO @@ -0,0 +1,378 @@ +Metadata-Version: 1.1 +Name: funcsigs +Version: 1.0.2 +Summary: Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+ +Home-page: http://funcsigs.readthedocs.org +Author: Testing Cabal +Author-email: testing-in-python@lists.idyll.org +License: ASL +Description: .. funcsigs documentation master file, created by + sphinx-quickstart on Fri Apr 20 20:27:52 2012. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + + Introducing funcsigs + ==================== + + The Funcsigs Package + -------------------- + + ``funcsigs`` is a backport of the `PEP 362`_ function signature features from + Python 3.3's `inspect`_ module. The backport is compatible with Python 2.6, 2.7 + as well as 3.3 and up. 3.2 was supported by version 0.4, but with setuptools and + pip no longer supporting 3.2, we cannot make any statement about 3.2 + compatibility. + + Compatibility + ````````````` + + The ``funcsigs`` backport has been tested against: + + * CPython 2.6 + * CPython 2.7 + * CPython 3.3 + * CPython 3.4 + * CPython 3.5 + * CPython nightlies + * PyPy and PyPy3(currently failing CI) + + Continuous integration testing is provided by `Travis CI`_. + + Under Python 2.x there is a compatibility issue when a function is assigned to + the ``__wrapped__`` property of a class after it has been constructed. + Similiarily there under PyPy directly passing the ``__call__`` method of a + builtin is also a compatibility issues. Otherwise the functionality is + believed to be uniform between both Python2 and Python3. + + Issues + `````` + + Source code for ``funcsigs`` is hosted on `GitHub`_. Any bug reports or feature + requests can be made using GitHub's `issues system`_. 
|build_status| |coverage| + + Example + ------- + + To obtain a `Signature` object, pass the target function to the + ``funcsigs.signature`` function. + + .. code-block:: python + + >>> from funcsigs import signature + >>> def foo(a, b=None, *args, **kwargs): + ... pass + ... + >>> sig = signature(foo) + >>> sig + + >>> sig.parameters + OrderedDict([('a', ), ('b', ), ('args', ), ('kwargs', )]) + >>> sig.return_annotation + + + Introspecting callables with the Signature object + ------------------------------------------------- + + .. note:: + + This section of documentation is a direct reproduction of the Python + standard library documentation for the inspect module. + + The Signature object represents the call signature of a callable object and its + return annotation. To retrieve a Signature object, use the :func:`signature` + function. + + .. function:: signature(callable) + + Return a :class:`Signature` object for the given ``callable``:: + + >>> from funcsigs import signature + >>> def foo(a, *, b:int, **kwargs): + ... pass + + >>> sig = signature(foo) + + >>> str(sig) + '(a, *, b:int, **kwargs)' + + >>> str(sig.parameters['b']) + 'b:int' + + >>> sig.parameters['b'].annotation + + + Accepts a wide range of python callables, from plain functions and classes to + :func:`functools.partial` objects. + + .. note:: + + Some callables may not be introspectable in certain implementations of + Python. For example, in CPython, built-in functions defined in C provide + no metadata about their arguments. + + + .. class:: Signature + + A Signature object represents the call signature of a function and its return + annotation. For each parameter accepted by the function it stores a + :class:`Parameter` object in its :attr:`parameters` collection. + + Signature objects are *immutable*. Use :meth:`Signature.replace` to make a + modified copy. + + .. attribute:: Signature.empty + + A special class-level marker to specify absence of a return annotation. + + .. attribute:: Signature.parameters + + An ordered mapping of parameters' names to the corresponding + :class:`Parameter` objects. + + .. attribute:: Signature.return_annotation + + The "return" annotation for the callable. If the callable has no "return" + annotation, this attribute is set to :attr:`Signature.empty`. + + .. method:: Signature.bind(*args, **kwargs) + + Create a mapping from positional and keyword arguments to parameters. + Returns :class:`BoundArguments` if ``*args`` and ``**kwargs`` match the + signature, or raises a :exc:`TypeError`. + + .. method:: Signature.bind_partial(*args, **kwargs) + + Works the same way as :meth:`Signature.bind`, but allows the omission of + some required arguments (mimics :func:`functools.partial` behavior.) + Returns :class:`BoundArguments`, or raises a :exc:`TypeError` if the + passed arguments do not match the signature. + + .. method:: Signature.replace(*[, parameters][, return_annotation]) + + Create a new Signature instance based on the instance replace was invoked + on. It is possible to pass different ``parameters`` and/or + ``return_annotation`` to override the corresponding properties of the base + signature. To remove return_annotation from the copied Signature, pass in + :attr:`Signature.empty`. + + :: + + >>> def test(a, b): + ... pass + >>> sig = signature(test) + >>> new_sig = sig.replace(return_annotation="new return anno") + >>> str(new_sig) + "(a, b) -> 'new return anno'" + + + .. class:: Parameter + + Parameter objects are *immutable*. 
Instead of modifying a Parameter object, + you can use :meth:`Parameter.replace` to create a modified copy. + + .. attribute:: Parameter.empty + + A special class-level marker to specify absence of default values and + annotations. + + .. attribute:: Parameter.name + + The name of the parameter as a string. Must be a valid python identifier + name (with the exception of ``POSITIONAL_ONLY`` parameters, which can have + it set to ``None``). + + .. attribute:: Parameter.default + + The default value for the parameter. If the parameter has no default + value, this attribute is set to :attr:`Parameter.empty`. + + .. attribute:: Parameter.annotation + + The annotation for the parameter. If the parameter has no annotation, + this attribute is set to :attr:`Parameter.empty`. + + .. attribute:: Parameter.kind + + Describes how argument values are bound to the parameter. Possible values + (accessible via :class:`Parameter`, like ``Parameter.KEYWORD_ONLY``): + + +------------------------+----------------------------------------------+ + | Name | Meaning | + +========================+==============================================+ + | *POSITIONAL_ONLY* | Value must be supplied as a positional | + | | argument. | + | | | + | | Python has no explicit syntax for defining | + | | positional-only parameters, but many built-in| + | | and extension module functions (especially | + | | those that accept only one or two parameters)| + | | accept them. | + +------------------------+----------------------------------------------+ + | *POSITIONAL_OR_KEYWORD*| Value may be supplied as either a keyword or | + | | positional argument (this is the standard | + | | binding behaviour for functions implemented | + | | in Python.) | + +------------------------+----------------------------------------------+ + | *VAR_POSITIONAL* | A tuple of positional arguments that aren't | + | | bound to any other parameter. This | + | | corresponds to a ``*args`` parameter in a | + | | Python function definition. | + +------------------------+----------------------------------------------+ + | *KEYWORD_ONLY* | Value must be supplied as a keyword argument.| + | | Keyword only parameters are those which | + | | appear after a ``*`` or ``*args`` entry in a | + | | Python function definition. | + +------------------------+----------------------------------------------+ + | *VAR_KEYWORD* | A dict of keyword arguments that aren't bound| + | | to any other parameter. This corresponds to a| + | | ``**kwargs`` parameter in a Python function | + | | definition. | + +------------------------+----------------------------------------------+ + + Example: print all keyword-only arguments without default values:: + + >>> def foo(a, b, *, c, d=10): + ... pass + + >>> sig = signature(foo) + >>> for param in sig.parameters.values(): + ... if (param.kind == param.KEYWORD_ONLY and + ... param.default is param.empty): + ... print('Parameter:', param) + Parameter: c + + .. method:: Parameter.replace(*[, name][, kind][, default][, annotation]) + + Create a new Parameter instance based on the instance replaced was invoked + on. To override a :class:`Parameter` attribute, pass the corresponding + argument. To remove a default value or/and an annotation from a + Parameter, pass :attr:`Parameter.empty`. 
+ + :: + + >>> from funcsigs import Parameter + >>> param = Parameter('foo', Parameter.KEYWORD_ONLY, default=42) + >>> str(param) + 'foo=42' + + >>> str(param.replace()) # Will create a shallow copy of 'param' + 'foo=42' + + >>> str(param.replace(default=Parameter.empty, annotation='spam')) + "foo:'spam'" + + + .. class:: BoundArguments + + Result of a :meth:`Signature.bind` or :meth:`Signature.bind_partial` call. + Holds the mapping of arguments to the function's parameters. + + .. attribute:: BoundArguments.arguments + + An ordered, mutable mapping (:class:`collections.OrderedDict`) of + parameters' names to arguments' values. Contains only explicitly bound + arguments. Changes in :attr:`arguments` will reflect in :attr:`args` and + :attr:`kwargs`. + + Should be used in conjunction with :attr:`Signature.parameters` for any + argument processing purposes. + + .. note:: + + Arguments for which :meth:`Signature.bind` or + :meth:`Signature.bind_partial` relied on a default value are skipped. + However, if needed, it is easy to include them. + + :: + + >>> def foo(a, b=10): + ... pass + + >>> sig = signature(foo) + >>> ba = sig.bind(5) + + >>> ba.args, ba.kwargs + ((5,), {}) + + >>> for param in sig.parameters.values(): + ... if param.name not in ba.arguments: + ... ba.arguments[param.name] = param.default + + >>> ba.args, ba.kwargs + ((5, 10), {}) + + + .. attribute:: BoundArguments.args + + A tuple of positional arguments values. Dynamically computed from the + :attr:`arguments` attribute. + + .. attribute:: BoundArguments.kwargs + + A dict of keyword arguments values. Dynamically computed from the + :attr:`arguments` attribute. + + The :attr:`args` and :attr:`kwargs` properties can be used to invoke + functions:: + + def test(a, *, b): + ... + + sig = signature(test) + ba = sig.bind(10, b=20) + test(*ba.args, **ba.kwargs) + + + .. seealso:: + + :pep:`362` - Function Signature Object. + The detailed specification, implementation details and examples. + + Copyright + --------- + + *funcsigs* is a derived work of CPython under the terms of the `PSF License + Agreement`_. The original CPython inspect module, its unit tests and + documentation are the copyright of the Python Software Foundation. The derived + work is distributed under the `Apache License Version 2.0`_. + + .. _PSF License Agreement: http://docs.python.org/3/license.html#terms-and-conditions-for-accessing-or-otherwise-using-python + .. _Apache License Version 2.0: http://opensource.org/licenses/Apache-2.0 + .. _GitHub: https://github.com/testing-cabal/funcsigs + .. _PSF License Agreement: http://docs.python.org/3/license.html#terms-and-conditions-for-accessing-or-otherwise-using-python + .. _Travis CI: http://travis-ci.org/ + .. _Read The Docs: http://funcsigs.readthedocs.org/ + .. _PEP 362: http://www.python.org/dev/peps/pep-0362/ + .. _inspect: http://docs.python.org/3/library/inspect.html#introspecting-callables-with-the-signature-object + .. _issues system: https://github.com/testing-cabal/funcsigs/issues + + .. |build_status| image:: https://secure.travis-ci.org/aliles/funcsigs.png?branch=master + :target: http://travis-ci.org/#!/aliles/funcsigs + :alt: Current build status + + .. |coverage| image:: https://coveralls.io/repos/aliles/funcsigs/badge.png?branch=master + :target: https://coveralls.io/r/aliles/funcsigs?branch=master + :alt: Coverage status + + .. 
|pypi_version| image:: https://pypip.in/v/funcsigs/badge.png + :target: https://crate.io/packages/funcsigs/ + :alt: Latest PyPI version + + + +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/DESCRIPTION.rst b/third_party/python/funcsigs/README.rst similarity index 99% rename from third_party/python/funcsigs/funcsigs-1.0.2.dist-info/DESCRIPTION.rst rename to third_party/python/funcsigs/README.rst index c9acbda168b7..5fbca27e6e6b 100644 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/DESCRIPTION.rst +++ b/third_party/python/funcsigs/README.rst @@ -351,5 +351,3 @@ work is distributed under the `Apache License Version 2.0`_. :alt: Latest PyPI version - - diff --git a/third_party/python/funcsigs/docs/Makefile b/third_party/python/funcsigs/docs/Makefile new file mode 100644 index 000000000000..f7ab3d16b405 --- /dev/null +++ b/third_party/python/funcsigs/docs/Makefile @@ -0,0 +1,153 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
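The funcsigs PKG-INFO above walks through ``Signature.bind`` and the ``BoundArguments`` default-filling idiom in prose; the following is a minimal, self-contained sketch of that same pattern (the ``greet`` function is a made-up example, ``from funcsigs import signature`` assumes the vendored package is importable, and on Python 3 the identical API is available as ``inspect.signature``)::

    from funcsigs import signature

    def greet(name, greeting='hello', *args, **kwargs):
        pass

    sig = signature(greet)

    # Bind a single positional argument; defaults are not filled in automatically.
    ba = sig.bind('world')
    assert ba.args == ('world',) and ba.kwargs == {}

    # Fill in defaults explicitly, following the loop shown in the PKG-INFO above.
    for param in sig.parameters.values():
        if (param.name not in ba.arguments
                and param.default is not param.empty):
            ba.arguments[param.name] = param.default

    # BoundArguments.args is recomputed from the updated arguments mapping.
    assert ba.args == ('world', 'hello')

Filtering on ``param.default is not param.empty`` keeps the ``*args``/``**kwargs`` parameters out of ``arguments``, which would otherwise end up bound to the ``empty`` marker rather than to real values.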
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR) + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/funcsigs.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/funcsigs.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/funcsigs" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/funcsigs" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." 
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/third_party/python/funcsigs/docs/_templates/page.html b/third_party/python/funcsigs/docs/_templates/page.html new file mode 100644 index 000000000000..5e1e00bcafaf --- /dev/null +++ b/third_party/python/funcsigs/docs/_templates/page.html @@ -0,0 +1,9 @@ +{% extends "!page.html" %} +{% block extrahead %} + + Fork me on GitHub + + {{ super() }} +{% endblock %} diff --git a/third_party/python/funcsigs/docs/conf.py b/third_party/python/funcsigs/docs/conf.py new file mode 100644 index 000000000000..c6e4194cc05c --- /dev/null +++ b/third_party/python/funcsigs/docs/conf.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# funcsigs documentation build configuration file, created by +# sphinx-quickstart on Fri Apr 20 20:27:52 2012. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. 
+source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'funcsigs' +copyright = '2013, Aaron Iles' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +from funcsigs import __version__ +version = '.'.join(__version__.split('.')[:2]) +# The full version, including alpha/beta/rc tags. +release = __version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'agogo' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'funcsigsdoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', 'funcsigs.tex', 'funcsigs Documentation', + 'Aaron Iles', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'funcsigs', 'funcsigs Documentation', + ['Aaron Iles'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'funcsigs', 'funcsigs Documentation', + 'Aaron Iles', 'funcsigs', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + 'python3': ('http://docs.python.org/py3k', None), + 'python': ('http://docs.python.org/', None) +} diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/METADATA b/third_party/python/funcsigs/docs/index.rst similarity index 92% rename from third_party/python/funcsigs/funcsigs-1.0.2.dist-info/METADATA rename to third_party/python/funcsigs/docs/index.rst index d584890b947f..5fbca27e6e6b 100644 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/METADATA +++ b/third_party/python/funcsigs/docs/index.rst @@ -1,29 +1,3 @@ -Metadata-Version: 2.0 -Name: funcsigs -Version: 1.0.2 -Summary: Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+ -Home-page: http://funcsigs.readthedocs.org -Author: Testing Cabal -Author-email: testing-in-python@lists.idyll.org -License: ASL -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Dist: ordereddict; python_version<"2.7" - .. funcsigs documentation master file, created by sphinx-quickstart on Fri Apr 20 20:27:52 2012. You can adapt this file completely to your liking, but it should at least @@ -377,5 +351,3 @@ work is distributed under the `Apache License Version 2.0`_. 
:alt: Latest PyPI version - - diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/RECORD b/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/RECORD deleted file mode 100644 index cb7d6ae516db..000000000000 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -funcsigs/__init__.py,sha256=GV4YulgeGW1IDS3l4__glZnbTJolLD_UGAFAhSGWOC8,30390 -funcsigs/version.py,sha256=Y3LSfRioSl2xch70pq_ULlvyECXyEtN3krVaWeGyaxk,22 -funcsigs-1.0.2.dist-info/DESCRIPTION.rst,sha256=aVg6hYTYjY6A9-oI4-lhQ9UEqeu0L3kn7LthaPgOYtY,13297 -funcsigs-1.0.2.dist-info/METADATA,sha256=fIBrN18etIHPSCZe0aq8SUSlmBwfvtl02Ce_thKVFtk,14420 -funcsigs-1.0.2.dist-info/RECORD,, -funcsigs-1.0.2.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -funcsigs-1.0.2.dist-info/metadata.json,sha256=NDDdI0osHQ-zi-4gvTivhTHSrTxME8fHKRCAfJ5NXj0,1294 -funcsigs-1.0.2.dist-info/pbr.json,sha256=TM9nSbjgR_z-OvEdNqbILWOYhlmpDbujt8yM_OHBwxM,46 -funcsigs-1.0.2.dist-info/top_level.txt,sha256=p0FFcT9rWjPboZWPK-LlnMEST6D8xzf6RvETJeNIsNs,9 diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/metadata.json b/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/metadata.json deleted file mode 100644 index 0753fdfb9ff9..000000000000 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"contacts": [{"email": "testing-in-python@lists.idyll.org", "name": "Testing Cabal", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://funcsigs.readthedocs.org"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "ASL", "metadata_version": "2.0", "name": "funcsigs", "run_requires": [{"environment": "python_version<\"2.7\"", "requires": ["ordereddict"]}], "summary": "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+", "test_requires": [{"requires": ["unittest2"]}], "version": "1.0.2"} \ No newline at end of file diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/pbr.json b/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/pbr.json deleted file mode 100644 index 16c4e27a8857..000000000000 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/pbr.json +++ /dev/null @@ -1 +0,0 @@ -{"is_release": true, "git_version": "1b88d78"} \ No newline at end of file diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/top_level.txt b/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/top_level.txt deleted file mode 100644 index 5ac8863d55c4..000000000000 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -funcsigs diff --git a/third_party/python/pystache/setup.cfg b/third_party/python/funcsigs/setup.cfg similarity index 71% rename from third_party/python/pystache/setup.cfg rename to 
third_party/python/funcsigs/setup.cfg index 861a9f554263..6c71b612d8d5 100644 --- a/third_party/python/pystache/setup.cfg +++ b/third_party/python/funcsigs/setup.cfg @@ -1,3 +1,6 @@ +[wheel] +universal = 1 + [egg_info] tag_build = tag_date = 0 diff --git a/third_party/python/funcsigs/setup.py b/third_party/python/funcsigs/setup.py new file mode 100644 index 000000000000..f3696888f9eb --- /dev/null +++ b/third_party/python/funcsigs/setup.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +from setuptools import setup +import re +import sys + +def load_version(filename='funcsigs/version.py'): + "Parse a __version__ number from a source file" + with open(filename) as source: + text = source.read() + match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text) + if not match: + msg = "Unable to find version number in {}".format(filename) + raise RuntimeError(msg) + version = match.group(1) + return version + + +setup( + name="funcsigs", + version=load_version(), + packages=['funcsigs'], + zip_safe=False, + author="Testing Cabal", + author_email="testing-in-python@lists.idyll.org", + url="http://funcsigs.readthedocs.org", + description="Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+", + long_description=open('README.rst').read(), + license="ASL", + extras_require = { + ':python_version<"2.7"': ['ordereddict'], + }, + setup_requires = ["setuptools>=17.1"], + classifiers = [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: Implementation :: CPython', + 'Programming Language :: Python :: Implementation :: PyPy', + 'Topic :: Software Development :: Libraries :: Python Modules' + ], + tests_require = ['unittest2'], + test_suite = 'unittest2.collector', +) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/py.typed b/third_party/python/funcsigs/tests/__init__.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/py.typed rename to third_party/python/funcsigs/tests/__init__.py diff --git a/third_party/python/funcsigs/tests/test_formatannotation.py b/third_party/python/funcsigs/tests/test_formatannotation.py new file mode 100644 index 000000000000..4b98e6037d83 --- /dev/null +++ b/third_party/python/funcsigs/tests/test_formatannotation.py @@ -0,0 +1,17 @@ +import funcsigs + +import unittest2 as unittest + +class TestFormatAnnotation(unittest.TestCase): + def test_string (self): + self.assertEqual(funcsigs.formatannotation("annotation"), + "'annotation'") + + def test_builtin_type (self): + self.assertEqual(funcsigs.formatannotation(int), + "int") + + def test_user_type (self): + class dummy (object): pass + self.assertEqual(funcsigs.formatannotation(dummy), + "tests.test_formatannotation.dummy") diff --git a/third_party/python/funcsigs/tests/test_funcsigs.py b/third_party/python/funcsigs/tests/test_funcsigs.py new file mode 100644 index 000000000000..a7b9cca7679f --- /dev/null +++ b/third_party/python/funcsigs/tests/test_funcsigs.py @@ -0,0 +1,91 @@ +import 
unittest2 as unittest + +import doctest +import sys + +import funcsigs as inspect + + +class TestFunctionSignatures(unittest.TestCase): + + @staticmethod + def signature(func): + sig = inspect.signature(func) + return (tuple((param.name, + (Ellipsis if param.default is param.empty else param.default), + (Ellipsis if param.annotation is param.empty + else param.annotation), + str(param.kind).lower()) + for param in sig.parameters.values()), + (Ellipsis if sig.return_annotation is sig.empty + else sig.return_annotation)) + + def test_zero_arguments(self): + def test(): + pass + self.assertEqual(self.signature(test), + ((), Ellipsis)) + + def test_single_positional_argument(self): + def test(a): + pass + self.assertEqual(self.signature(test), + (((('a', Ellipsis, Ellipsis, "positional_or_keyword")),), Ellipsis)) + + def test_single_keyword_argument(self): + def test(a=None): + pass + self.assertEqual(self.signature(test), + (((('a', None, Ellipsis, "positional_or_keyword")),), Ellipsis)) + + def test_var_args(self): + def test(*args): + pass + self.assertEqual(self.signature(test), + (((('args', Ellipsis, Ellipsis, "var_positional")),), Ellipsis)) + + def test_keywords_args(self): + def test(**kwargs): + pass + self.assertEqual(self.signature(test), + (((('kwargs', Ellipsis, Ellipsis, "var_keyword")),), Ellipsis)) + + def test_multiple_arguments(self): + def test(a, b=None, *args, **kwargs): + pass + self.assertEqual(self.signature(test), (( + ('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', None, Ellipsis, "positional_or_keyword"), + ('args', Ellipsis, Ellipsis, "var_positional"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword"), + ), Ellipsis)) + + def test_has_version(self): + self.assertTrue(inspect.__version__) + + def test_readme(self): + # XXX: This fails but doesn't fail the build. + # (and the syntax isn't valid on all pythons so that seems a little + # hard to get right. 
+ doctest.testfile('../README.rst') + + def test_unbound_method(self): + self_kind = "positional_or_keyword" + class Test(object): + def method(self): + pass + def method_with_args(self, a): + pass + def method_with_varargs(*args): + pass + self.assertEqual( + self.signature(Test.method), + (((('self', Ellipsis, Ellipsis, self_kind)),), Ellipsis)) + self.assertEqual( + self.signature(Test.method_with_args), + ((('self', Ellipsis, Ellipsis, self_kind), + ('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ), Ellipsis)) + self.assertEqual( + self.signature(Test.method_with_varargs), + ((('args', Ellipsis, Ellipsis, "var_positional"),), Ellipsis)) diff --git a/third_party/python/funcsigs/tests/test_inspect.py b/third_party/python/funcsigs/tests/test_inspect.py new file mode 100644 index 000000000000..98d6592fcc72 --- /dev/null +++ b/third_party/python/funcsigs/tests/test_inspect.py @@ -0,0 +1,1002 @@ +# Copyright 2001-2013 Python Software Foundation; All Rights Reserved +from __future__ import absolute_import, division, print_function +import collections +import functools +import sys + +import unittest2 as unittest + +import funcsigs as inspect + + +class TestSignatureObject(unittest.TestCase): + @staticmethod + def signature(func): + sig = inspect.signature(func) + return (tuple((param.name, + (Ellipsis if param.default is param.empty else param.default), + (Ellipsis if param.annotation is param.empty + else param.annotation), + str(param.kind).lower()) + for param in sig.parameters.values()), + (Ellipsis if sig.return_annotation is sig.empty + else sig.return_annotation)) + + if sys.version_info[0] > 2: + exec(""" +def test_signature_object(self): + S = inspect.Signature + P = inspect.Parameter + + self.assertEqual(str(S()), '()') + + def test(po, pk, *args, ko, **kwargs): + pass + sig = inspect.signature(test) + po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY) + pk = sig.parameters['pk'] + args = sig.parameters['args'] + ko = sig.parameters['ko'] + kwargs = sig.parameters['kwargs'] + + S((po, pk, args, ko, kwargs)) + + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): + S((pk, po, args, ko, kwargs)) + + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): + S((po, args, pk, ko, kwargs)) + + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): + S((args, po, pk, ko, kwargs)) + + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): + S((po, pk, args, kwargs, ko)) + + kwargs2 = kwargs.replace(name='args') + with self.assertRaisesRegex(ValueError, 'duplicate parameter name'): + S((po, pk, args, kwargs2, ko)) +""") + + def test_signature_immutability(self): + def test(a): + pass + sig = inspect.signature(test) + + with self.assertRaises(AttributeError): + sig.foo = 'bar' + + # Python2 does not have MappingProxyType class + if sys.version_info[:2] < (3, 3): + return + + with self.assertRaises(TypeError): + sig.parameters['a'] = None + + def test_signature_on_noarg(self): + def test(): + pass + self.assertEqual(self.signature(test), ((), Ellipsis)) + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_wargs(self): + def test(a, b:'foo') -> 123: + pass + self.assertEqual(self.signature(test), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', Ellipsis, 'foo', "positional_or_keyword")), + 123)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_wkwonly(self): + def test(*, a:float, b:str) -> int: + pass + self.assertEqual(self.signature(test), + ((('a', Ellipsis, float, "keyword_only"), + 
('b', Ellipsis, str, "keyword_only")), + int)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_complex_args(self): + def test(a, b:'foo'=10, *args:'bar', spam:'baz', ham=123, **kwargs:int): + pass + self.assertEqual(self.signature(test), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', 10, 'foo', "positional_or_keyword"), + ('args', Ellipsis, 'bar', "var_positional"), + ('spam', Ellipsis, 'baz', "keyword_only"), + ('ham', 123, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, int, "var_keyword")), + Ellipsis)) +""") + + def test_signature_on_builtin_function(self): + with self.assertRaisesRegex(ValueError, 'not supported by signature'): + inspect.signature(type) + with self.assertRaisesRegex(ValueError, 'not supported by signature'): + # support for 'wrapper_descriptor' + inspect.signature(type.__call__) + if hasattr(sys, 'pypy_version_info'): + raise ValueError('not supported by signature') + with self.assertRaisesRegex(ValueError, 'not supported by signature'): + # support for 'method-wrapper' + inspect.signature(min.__call__) + if hasattr(sys, 'pypy_version_info'): + raise ValueError('not supported by signature') + with self.assertRaisesRegex(ValueError, + 'no signature found for builtin function'): + # support for 'method-wrapper' + inspect.signature(min) + + def test_signature_on_non_function(self): + with self.assertRaisesRegex(TypeError, 'is not a callable object'): + inspect.signature(42) + + with self.assertRaisesRegex(TypeError, 'is not a Python function'): + inspect.Signature.from_function(42) + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_method(self): + class Test: + def foo(self, arg1, arg2=1) -> int: + pass + + meth = Test().foo + + self.assertEqual(self.signature(meth), + ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"), + ('arg2', 1, Ellipsis, "positional_or_keyword")), + int)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_classmethod(self): + class Test: + @classmethod + def foo(cls, arg1, *, arg2=1): + pass + + meth = Test().foo + self.assertEqual(self.signature(meth), + ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"), + ('arg2', 1, Ellipsis, "keyword_only")), + Ellipsis)) + + meth = Test.foo + self.assertEqual(self.signature(meth), + ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"), + ('arg2', 1, Ellipsis, "keyword_only")), + Ellipsis)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_staticmethod(self): + class Test: + @staticmethod + def foo(cls, *, arg): + pass + + meth = Test().foo + self.assertEqual(self.signature(meth), + ((('cls', Ellipsis, Ellipsis, "positional_or_keyword"), + ('arg', Ellipsis, Ellipsis, "keyword_only")), + Ellipsis)) + + meth = Test.foo + self.assertEqual(self.signature(meth), + ((('cls', Ellipsis, Ellipsis, "positional_or_keyword"), + ('arg', Ellipsis, Ellipsis, "keyword_only")), + Ellipsis)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_partial(self): + from functools import partial + + def test(): + pass + + self.assertEqual(self.signature(partial(test)), ((), Ellipsis)) + + with self.assertRaisesRegex(ValueError, "has incorrect arguments"): + inspect.signature(partial(test, 1)) + + with self.assertRaisesRegex(ValueError, "has incorrect arguments"): + inspect.signature(partial(test, a=1)) + + def test(a, b, *, c, d): + pass + + self.assertEqual(self.signature(partial(test)), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', Ellipsis, Ellipsis, "positional_or_keyword"), + 
('c', Ellipsis, Ellipsis, "keyword_only"), + ('d', Ellipsis, Ellipsis, "keyword_only")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, 1)), + ((('b', Ellipsis, Ellipsis, "positional_or_keyword"), + ('c', Ellipsis, Ellipsis, "keyword_only"), + ('d', Ellipsis, Ellipsis, "keyword_only")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, 1, c=2)), + ((('b', Ellipsis, Ellipsis, "positional_or_keyword"), + ('c', 2, Ellipsis, "keyword_only"), + ('d', Ellipsis, Ellipsis, "keyword_only")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, b=1, c=2)), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', 1, Ellipsis, "positional_or_keyword"), + ('c', 2, Ellipsis, "keyword_only"), + ('d', Ellipsis, Ellipsis, "keyword_only")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, 0, b=1, c=2)), + ((('b', 1, Ellipsis, "positional_or_keyword"), + ('c', 2, Ellipsis, "keyword_only"), + ('d', Ellipsis, Ellipsis, "keyword_only"),), + Ellipsis)) + + def test(a, *args, b, **kwargs): + pass + + self.assertEqual(self.signature(partial(test, 1)), + ((('args', Ellipsis, Ellipsis, "var_positional"), + ('b', Ellipsis, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, 1, 2, 3)), + ((('args', Ellipsis, Ellipsis, "var_positional"), + ('b', Ellipsis, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword")), + Ellipsis)) + + + self.assertEqual(self.signature(partial(test, 1, 2, 3, test=True)), + ((('args', Ellipsis, Ellipsis, "var_positional"), + ('b', Ellipsis, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, 1, 2, 3, test=1, b=0)), + ((('args', Ellipsis, Ellipsis, "var_positional"), + ('b', 0, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, b=0)), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('args', Ellipsis, Ellipsis, "var_positional"), + ('b', 0, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(partial(test, b=0, test=1)), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('args', Ellipsis, Ellipsis, "var_positional"), + ('b', 0, Ellipsis, "keyword_only"), + ('kwargs', Ellipsis, Ellipsis, "var_keyword")), + Ellipsis)) + + def test(a, b, c:int) -> 42: + pass + + sig = test.__signature__ = inspect.signature(test) + + self.assertEqual(self.signature(partial(partial(test, 1))), + ((('b', Ellipsis, Ellipsis, "positional_or_keyword"), + ('c', Ellipsis, int, "positional_or_keyword")), + 42)) + + self.assertEqual(self.signature(partial(partial(test, 1), 2)), + ((('c', Ellipsis, int, "positional_or_keyword"),), + 42)) + + psig = inspect.signature(partial(partial(test, 1), 2)) + + def foo(a): + return a + _foo = partial(partial(foo, a=10), a=20) + self.assertEqual(self.signature(_foo), + ((('a', 20, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + # check that we don't have any side-effects in signature(), + # and the partial object is still functioning + self.assertEqual(_foo(), 20) + + def foo(a, b, c): + return a, b, c + _foo = partial(partial(foo, 1, b=20), b=30) + self.assertEqual(self.signature(_foo), + ((('b', 30, Ellipsis, "positional_or_keyword"), + ('c', Ellipsis, Ellipsis, "positional_or_keyword")), + Ellipsis)) + self.assertEqual(_foo(c=10), (1, 30, 10)) + _foo = 
partial(_foo, 2) # now 'b' has two values - + # positional and keyword + with self.assertRaisesRegex(ValueError, "has incorrect arguments"): + inspect.signature(_foo) + + def foo(a, b, c, *, d): + return a, b, c, d + _foo = partial(partial(foo, d=20, c=20), b=10, d=30) + self.assertEqual(self.signature(_foo), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', 10, Ellipsis, "positional_or_keyword"), + ('c', 20, Ellipsis, "positional_or_keyword"), + ('d', 30, Ellipsis, "keyword_only")), + Ellipsis)) + ba = inspect.signature(_foo).bind(a=200, b=11) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (200, 11, 20, 30)) + + def foo(a=1, b=2, c=3): + return a, b, c + _foo = partial(foo, a=10, c=13) + ba = inspect.signature(_foo).bind(11) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 2, 13)) + ba = inspect.signature(_foo).bind(11, 12) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13)) + ba = inspect.signature(_foo).bind(11, b=12) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13)) + ba = inspect.signature(_foo).bind(b=12) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (10, 12, 13)) + _foo = partial(_foo, b=10) + ba = inspect.signature(_foo).bind(12, 14) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (12, 14, 13)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_decorated(self): + import functools + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs) -> int: + return func(*args, **kwargs) + return wrapper + + class Foo: + @decorator + def bar(self, a, b): + pass + + self.assertEqual(self.signature(Foo.bar), + ((('self', Ellipsis, Ellipsis, "positional_or_keyword"), + ('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', Ellipsis, Ellipsis, "positional_or_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(Foo().bar), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', Ellipsis, Ellipsis, "positional_or_keyword")), + Ellipsis)) + + # Test that we handle method wrappers correctly + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs) -> int: + return func(42, *args, **kwargs) + sig = inspect.signature(func) + new_params = tuple(sig.parameters.values())[1:] + wrapper.__signature__ = sig.replace(parameters=new_params) + return wrapper + + class Foo: + @decorator + def __call__(self, a, b): + pass + + self.assertEqual(self.signature(Foo.__call__), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"), + ('b', Ellipsis, Ellipsis, "positional_or_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(Foo().__call__), + ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_on_class(self): + class C: + def __init__(self, a): + pass + + self.assertEqual(self.signature(C), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + class CM(type): + def __call__(cls, a): + pass + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(C), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + class CM(type): + def __new__(mcls, name, bases, dct, *, foo=1): + return super().__new__(mcls, name, bases, dct) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(C), + ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + self.assertEqual(self.signature(CM), + ((('name', Ellipsis, Ellipsis, "positional_or_keyword"), + ('bases', Ellipsis, Ellipsis, 
"positional_or_keyword"), + ('dct', Ellipsis, Ellipsis, "positional_or_keyword"), + ('foo', 1, Ellipsis, "keyword_only")), + Ellipsis)) + + class CMM(type): + def __new__(mcls, name, bases, dct, *, foo=1): + return super().__new__(mcls, name, bases, dct) + def __call__(cls, nm, bs, dt): + return type(nm, bs, dt) + class CM(type, metaclass=CMM): + def __new__(mcls, name, bases, dct, *, bar=2): + return super().__new__(mcls, name, bases, dct) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(CMM), + ((('name', Ellipsis, Ellipsis, "positional_or_keyword"), + ('bases', Ellipsis, Ellipsis, "positional_or_keyword"), + ('dct', Ellipsis, Ellipsis, "positional_or_keyword"), + ('foo', 1, Ellipsis, "keyword_only")), + Ellipsis)) + + self.assertEqual(self.signature(CM), + ((('nm', Ellipsis, Ellipsis, "positional_or_keyword"), + ('bs', Ellipsis, Ellipsis, "positional_or_keyword"), + ('dt', Ellipsis, Ellipsis, "positional_or_keyword")), + Ellipsis)) + + self.assertEqual(self.signature(C), + ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + class CM(type): + def __init__(cls, name, bases, dct, *, bar=2): + return super().__init__(name, bases, dct) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(CM), + ((('name', Ellipsis, Ellipsis, "positional_or_keyword"), + ('bases', Ellipsis, Ellipsis, "positional_or_keyword"), + ('dct', Ellipsis, Ellipsis, "positional_or_keyword"), + ('bar', 2, Ellipsis, "keyword_only")), + Ellipsis)) +""") + + def test_signature_on_callable_objects(self): + class Foo(object): + def __call__(self, a): + pass + + self.assertEqual(self.signature(Foo()), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + class Spam(object): + pass + with self.assertRaisesRegex(TypeError, "is not a callable object"): + inspect.signature(Spam()) + + class Bar(Spam, Foo): + pass + + self.assertEqual(self.signature(Bar()), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + class ToFail(object): + __call__ = type + with self.assertRaisesRegex(ValueError, "not supported by signature"): + inspect.signature(ToFail()) + + if sys.version_info[0] < 3: + return + + class Wrapped(object): + pass + Wrapped.__wrapped__ = lambda a: None + self.assertEqual(self.signature(Wrapped), + ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + def test_signature_on_lambdas(self): + self.assertEqual(self.signature((lambda a=10: a)), + ((('a', 10, Ellipsis, "positional_or_keyword"),), + Ellipsis)) + + if sys.version_info[0] > 2: + exec(""" +def test_signature_equality(self): + def foo(a, *, b:int) -> float: pass + self.assertNotEqual(inspect.signature(foo), 42) + + def bar(a, *, b:int) -> float: pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, b:int) -> int: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, b:int): pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, b:int=42) -> float: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, c) -> float: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, b:int) -> float: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + def spam(b:int, a) -> float: pass + self.assertNotEqual(inspect.signature(spam), inspect.signature(bar)) + + def foo(*, a, b, c): pass + def bar(*, c, b, a): pass + 
self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(*, a=1, b, c): pass + def bar(*, c, b, a=1): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(pos, *, a=1, b, c): pass + def bar(pos, *, c, b, a=1): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(pos, *, a, b, c): pass + def bar(pos, *, c, b, a=1): pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(pos, *args, a=42, b, c, **kwargs:int): pass + def bar(pos, *args, c, b, a=42, **kwargs:int): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) +""") + + def test_signature_unhashable(self): + def foo(a): pass + sig = inspect.signature(foo) + with self.assertRaisesRegex(TypeError, 'unhashable type'): + hash(sig) + + + if sys.version_info[0] > 2: + exec(""" +def test_signature_str(self): + def foo(a:int=1, *, b, c=None, **kwargs) -> 42: + pass + self.assertEqual(str(inspect.signature(foo)), + '(a:int=1, *, b, c=None, **kwargs) -> 42') + + def foo(a:int=1, *args, b, c=None, **kwargs) -> 42: + pass + self.assertEqual(str(inspect.signature(foo)), + '(a:int=1, *args, b, c=None, **kwargs) -> 42') + + def foo(): + pass + self.assertEqual(str(inspect.signature(foo)), '()') +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_str_positional_only(self): + P = inspect.Parameter + + def test(a_po, *, b, **kwargs): + return a_po, kwargs + + sig = inspect.signature(test) + new_params = list(sig.parameters.values()) + new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY) + test.__signature__ = sig.replace(parameters=new_params) + + self.assertEqual(str(inspect.signature(test)), + '(, *, b, **kwargs)') + + sig = inspect.signature(test) + new_params = list(sig.parameters.values()) + new_params[0] = new_params[0].replace(name=None) + test.__signature__ = sig.replace(parameters=new_params) + self.assertEqual(str(inspect.signature(test)), + '(<0>, *, b, **kwargs)') +""") + + if sys.version_info[0] > 2: + exec(""" +def test_signature_replace_anno(self): + def test() -> 42: + pass + + sig = inspect.signature(test) + sig = sig.replace(return_annotation=None) + self.assertIs(sig.return_annotation, None) + sig = sig.replace(return_annotation=sig.empty) + self.assertIs(sig.return_annotation, sig.empty) + sig = sig.replace(return_annotation=42) + self.assertEqual(sig.return_annotation, 42) + self.assertEqual(sig, inspect.signature(test)) +""") + + +class TestParameterObject(unittest.TestCase): + + def test_signature_parameter_kinds(self): + P = inspect.Parameter + self.assertTrue(P.POSITIONAL_ONLY < P.POSITIONAL_OR_KEYWORD < \ + P.VAR_POSITIONAL < P.KEYWORD_ONLY < P.VAR_KEYWORD) + + self.assertEqual(str(P.POSITIONAL_ONLY), 'POSITIONAL_ONLY') + self.assertTrue('POSITIONAL_ONLY' in repr(P.POSITIONAL_ONLY)) + + def test_signature_parameter_object(self): + p = inspect.Parameter('foo', default=10, + kind=inspect.Parameter.POSITIONAL_ONLY) + self.assertEqual(p.name, 'foo') + self.assertEqual(p.default, 10) + self.assertIs(p.annotation, p.empty) + self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY) + + with self.assertRaisesRegex(ValueError, 'invalid value'): + inspect.Parameter('foo', default=10, kind='123') + + with self.assertRaisesRegex(ValueError, 'not a valid parameter name'): + inspect.Parameter('1', kind=inspect.Parameter.VAR_KEYWORD) + + with self.assertRaisesRegex(ValueError, + 'non-positional-only parameter'): + inspect.Parameter(None, kind=inspect.Parameter.VAR_KEYWORD) + + with 
self.assertRaisesRegex(ValueError, 'cannot have default values'): + inspect.Parameter('a', default=42, + kind=inspect.Parameter.VAR_KEYWORD) + + with self.assertRaisesRegex(ValueError, 'cannot have default values'): + inspect.Parameter('a', default=42, + kind=inspect.Parameter.VAR_POSITIONAL) + + p = inspect.Parameter('a', default=42, + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD) + with self.assertRaisesRegex(ValueError, 'cannot have default values'): + p.replace(kind=inspect.Parameter.VAR_POSITIONAL) + + self.assertTrue(repr(p).startswith('') + + p = p.replace(name='1') + self.assertEqual(str(p), '<1>') + + def test_signature_parameter_immutability(self): + p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY) + + with self.assertRaises(AttributeError): + p.foo = 'bar' + + with self.assertRaises(AttributeError): + p.kind = 123 + + +class TestSignatureBind(unittest.TestCase): + @staticmethod + def call(func, *args, **kwargs): + sig = inspect.signature(func) + ba = sig.bind(*args, **kwargs) + return func(*ba.args, **ba.kwargs) + + def test_signature_bind_empty(self): + def test(): + return 42 + + self.assertEqual(self.call(test), 42) + with self.assertRaisesRegex(TypeError, 'too many positional arguments'): + self.call(test, 1) + with self.assertRaisesRegex(TypeError, 'too many positional arguments'): + self.call(test, 1, spam=10) + with self.assertRaisesRegex(TypeError, 'too many keyword arguments'): + self.call(test, spam=1) + + def test_signature_bind_var(self): + def test(*args, **kwargs): + return args, kwargs + + self.assertEqual(self.call(test), ((), {})) + self.assertEqual(self.call(test, 1), ((1,), {})) + self.assertEqual(self.call(test, 1, 2), ((1, 2), {})) + self.assertEqual(self.call(test, foo='bar'), ((), {'foo': 'bar'})) + self.assertEqual(self.call(test, 1, foo='bar'), ((1,), {'foo': 'bar'})) + self.assertEqual(self.call(test, args=10), ((), {'args': 10})) + self.assertEqual(self.call(test, 1, 2, foo='bar'), + ((1, 2), {'foo': 'bar'})) + + def test_signature_bind_just_args(self): + def test(a, b, c): + return a, b, c + + self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3)) + + with self.assertRaisesRegex(TypeError, 'too many positional arguments'): + self.call(test, 1, 2, 3, 4) + + with self.assertRaisesRegex(TypeError, "'b' parameter lacking default"): + self.call(test, 1) + + with self.assertRaisesRegex(TypeError, "'a' parameter lacking default"): + self.call(test) + + def test(a, b, c=10): + return a, b, c + self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3)) + self.assertEqual(self.call(test, 1, 2), (1, 2, 10)) + + def test(a=1, b=2, c=3): + return a, b, c + self.assertEqual(self.call(test, a=10, c=13), (10, 2, 13)) + self.assertEqual(self.call(test, a=10), (10, 2, 3)) + self.assertEqual(self.call(test, b=10), (1, 10, 3)) + + def test_signature_bind_varargs_order(self): + def test(*args): + return args + + self.assertEqual(self.call(test), ()) + self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3)) + + def test_signature_bind_args_and_varargs(self): + def test(a, b, c=3, *args): + return a, b, c, args + + self.assertEqual(self.call(test, 1, 2, 3, 4, 5), (1, 2, 3, (4, 5))) + self.assertEqual(self.call(test, 1, 2), (1, 2, 3, ())) + self.assertEqual(self.call(test, b=1, a=2), (2, 1, 3, ())) + self.assertEqual(self.call(test, 1, b=2), (1, 2, 3, ())) + + with self.assertRaisesRegex(TypeError, + "multiple values for argument 'c'"): + self.call(test, 1, 2, 3, c=4) + + def test_signature_bind_just_kwargs(self): + def test(**kwargs): + return kwargs + + 
self.assertEqual(self.call(test), {}) + self.assertEqual(self.call(test, foo='bar', spam='ham'), + {'foo': 'bar', 'spam': 'ham'}) + + def test_signature_bind_args_and_kwargs(self): + def test(a, b, c=3, **kwargs): + return a, b, c, kwargs + + self.assertEqual(self.call(test, 1, 2), (1, 2, 3, {})) + self.assertEqual(self.call(test, 1, 2, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, b=2, a=1, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, a=1, b=2, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, 1, b=2, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, 1, b=2, c=4, foo='bar', spam='ham'), + (1, 2, 4, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, 1, 2, 4, foo='bar'), + (1, 2, 4, {'foo': 'bar'})) + self.assertEqual(self.call(test, c=5, a=4, b=3), + (4, 3, 5, {})) + + if sys.version_info[0] > 2: + exec(""" +def test_signature_bind_kwonly(self): + def test(*, foo): + return foo + with self.assertRaisesRegex(TypeError, + 'too many positional arguments'): + self.call(test, 1) + self.assertEqual(self.call(test, foo=1), 1) + + def test(a, *, foo=1, bar): + return foo + with self.assertRaisesRegex(TypeError, + "'bar' parameter lacking default value"): + self.call(test, 1) + + def test(foo, *, bar): + return foo, bar + self.assertEqual(self.call(test, 1, bar=2), (1, 2)) + self.assertEqual(self.call(test, bar=2, foo=1), (1, 2)) + + with self.assertRaisesRegex(TypeError, + 'too many keyword arguments'): + self.call(test, bar=2, foo=1, spam=10) + + with self.assertRaisesRegex(TypeError, + 'too many positional arguments'): + self.call(test, 1, 2) + + with self.assertRaisesRegex(TypeError, + 'too many positional arguments'): + self.call(test, 1, 2, bar=2) + + with self.assertRaisesRegex(TypeError, + 'too many keyword arguments'): + self.call(test, 1, bar=2, spam='ham') + + with self.assertRaisesRegex(TypeError, + "'bar' parameter lacking default value"): + self.call(test, 1) + + def test(foo, *, bar, **bin): + return foo, bar, bin + self.assertEqual(self.call(test, 1, bar=2), (1, 2, {})) + self.assertEqual(self.call(test, foo=1, bar=2), (1, 2, {})) + self.assertEqual(self.call(test, 1, bar=2, spam='ham'), + (1, 2, {'spam': 'ham'})) + self.assertEqual(self.call(test, spam='ham', foo=1, bar=2), + (1, 2, {'spam': 'ham'})) + with self.assertRaisesRegex(TypeError, + "'foo' parameter lacking default value"): + self.call(test, spam='ham', bar=2) + self.assertEqual(self.call(test, 1, bar=2, bin=1, spam=10), + (1, 2, {'bin': 1, 'spam': 10})) +""") +# + if sys.version_info[0] > 2: + exec(""" +def test_signature_bind_arguments(self): + def test(a, *args, b, z=100, **kwargs): + pass + sig = inspect.signature(test) + ba = sig.bind(10, 20, b=30, c=40, args=50, kwargs=60) + # we won't have 'z' argument in the bound arguments object, as we didn't + # pass it to the 'bind' + self.assertEqual(tuple(ba.arguments.items()), + (('a', 10), ('args', (20,)), ('b', 30), + ('kwargs', {'c': 40, 'args': 50, 'kwargs': 60}))) + self.assertEqual(ba.kwargs, + {'b': 30, 'c': 40, 'args': 50, 'kwargs': 60}) + self.assertEqual(ba.args, (10, 20)) +""") +# + if sys.version_info[0] > 2: + exec(""" +def test_signature_bind_positional_only(self): + P = inspect.Parameter + + def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs): + return a_po, b_po, c_po, foo, bar, kwargs + + sig = inspect.signature(test) 
+ new_params = collections.OrderedDict(tuple(sig.parameters.items())) + for name in ('a_po', 'b_po', 'c_po'): + new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY) + new_sig = sig.replace(parameters=new_params.values()) + test.__signature__ = new_sig + + self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6), + (1, 2, 4, 5, 6, {})) + + with self.assertRaisesRegex(TypeError, "parameter is positional only"): + self.call(test, 1, 2, c_po=4) + + with self.assertRaisesRegex(TypeError, "parameter is positional only"): + self.call(test, a_po=1, b_po=2) +""") + + def test_bind_self(self): + class F: + def f(a, self): + return a, self + an_f = F() + partial_f = functools.partial(F.f, an_f) + ba = inspect.signature(partial_f).bind(self=10) + self.assertEqual((an_f, 10), partial_f(*ba.args, **ba.kwargs)) + + +class TestBoundArguments(unittest.TestCase): + + def test_signature_bound_arguments_unhashable(self): + def foo(a): pass + ba = inspect.signature(foo).bind(1) + + with self.assertRaisesRegex(TypeError, 'unhashable type'): + hash(ba) + + def test_signature_bound_arguments_equality(self): + def foo(a): pass + ba = inspect.signature(foo).bind(1) + self.assertEqual(ba, ba) + + ba2 = inspect.signature(foo).bind(1) + self.assertEqual(ba, ba2) + + ba3 = inspect.signature(foo).bind(2) + self.assertNotEqual(ba, ba3) + ba3.arguments['a'] = 1 + self.assertEqual(ba, ba3) + + def bar(b): pass + ba4 = inspect.signature(bar).bind(1) + self.assertNotEqual(ba, ba4) diff --git a/third_party/python/idna-ssl/LICENSE b/third_party/python/idna-ssl/LICENSE deleted file mode 100644 index 13ff0bb0c757..000000000000 --- a/third_party/python/idna-ssl/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License - -Copyright (c) 2018 aio-libs team https://github.com/aio-libs/ -Copyright (c) 2017 Ocean S. A. https://ocean.io/ - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
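
The funcsigs test modules restored above (``tests/test_funcsigs.py`` and ``tests/test_inspect.py``) exercise the ``signature``, ``Parameter``, ``bind`` and ``BoundArguments`` APIs. For reference, the sketch below shows that API in use; it is not part of the patch, the ``greet`` function is a made-up example, and the only assumption is that the vendored ``funcsigs`` package is importable.

.. code-block:: python

    # Minimal usage sketch (illustration only, not taken from the patch).
    # "greet" is a hypothetical function used purely for demonstration.
    import funcsigs


    def greet(name, greeting="hello", *args, **kwargs):
        return "%s, %s" % (greeting, name)


    sig = funcsigs.signature(greet)

    # Each Parameter carries the name, kind and default that the tests above
    # compare against (POSITIONAL_OR_KEYWORD, VAR_POSITIONAL, VAR_KEYWORD, ...).
    for param in sig.parameters.values():
        print(param.name, param.kind, param.default)

    # bind() performs the same argument-to-parameter matching the interpreter
    # would, returning a BoundArguments object with .args and .kwargs.
    bound = sig.bind("world", greeting="hi")
    print(greet(*bound.args, **bound.kwargs))

On Python 3 the same calls are available from the standard library ``inspect`` module, which funcsigs backports to older interpreters.
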
diff --git a/third_party/python/idna-ssl/MANIFEST.in b/third_party/python/idna-ssl/MANIFEST.in deleted file mode 100644 index e24206f7951c..000000000000 --- a/third_party/python/idna-ssl/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include README.rst -include LICENSE -recursive-exclude * __pycache__ -recursive-exclude * *.py[co] diff --git a/third_party/python/idna-ssl/PKG-INFO b/third_party/python/idna-ssl/PKG-INFO deleted file mode 100644 index 54ea22146d07..000000000000 --- a/third_party/python/idna-ssl/PKG-INFO +++ /dev/null @@ -1,81 +0,0 @@ -Metadata-Version: 1.1 -Name: idna-ssl -Version: 1.1.0 -Summary: Patch ssl.match_hostname for Unicode(idna) domains support -Home-page: https://github.com/aio-libs/idna-ssl -Author: Victor Kovtun -Author-email: hellysmile@gmail.com -License: UNKNOWN -Description: idna-ssl - ======== - - :info: Patch ssl.match_hostname for Unicode(idna) domains support - - .. image:: https://travis-ci.com/aio-libs/idna-ssl.svg?branch=master - :target: https://travis-ci.com/aio-libs/idna-ssl - - .. image:: https://img.shields.io/pypi/v/idna_ssl.svg - :target: https://pypi.python.org/pypi/idna_ssl - - .. image:: https://codecov.io/gh/aio-libs/idna-ssl/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/idna-ssl - - Installation - ------------ - - .. code-block:: shell - - pip install idna-ssl - - Usage - ----- - - .. code-block:: python - - from idna_ssl import patch_match_hostname # noqa isort:skip - patch_match_hostname() # noqa isort:skip - - import asyncio - - import aiohttp - - URL = 'https://цфоут.мвд.рф/news/item/8065038/' - - - async def main(): - async with aiohttp.ClientSession() as session: - async with session.get(URL) as response: - print(response) - - - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) - - Motivation - ---------- - - * Here is 100% backward capability - * Related aiohttp `issue `_ - * Related Python `bug `_ - * Related Python `pull request `_ - * It is fixed (by January 27 2018) in upcoming Python 3.7, but `IDNA2008 `_ is still broken - - Thanks - ------ - - The library was donated by `Ocean S.A. `_ - - Thanks to the company for contribution. - -Keywords: ssl,Unicode,idna,match_hostname -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 diff --git a/third_party/python/idna-ssl/README.rst b/third_party/python/idna-ssl/README.rst deleted file mode 100644 index 687e8b674a97..000000000000 --- a/third_party/python/idna-ssl/README.rst +++ /dev/null @@ -1,60 +0,0 @@ -idna-ssl -======== - -:info: Patch ssl.match_hostname for Unicode(idna) domains support - -.. image:: https://travis-ci.com/aio-libs/idna-ssl.svg?branch=master - :target: https://travis-ci.com/aio-libs/idna-ssl - -.. image:: https://img.shields.io/pypi/v/idna_ssl.svg - :target: https://pypi.python.org/pypi/idna_ssl - -.. image:: https://codecov.io/gh/aio-libs/idna-ssl/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/idna-ssl - -Installation ------------- - -.. code-block:: shell - - pip install idna-ssl - -Usage ------ - -.. 
code-block:: python - - from idna_ssl import patch_match_hostname # noqa isort:skip - patch_match_hostname() # noqa isort:skip - - import asyncio - - import aiohttp - - URL = 'https://цфоут.мвд.рф/news/item/8065038/' - - - async def main(): - async with aiohttp.ClientSession() as session: - async with session.get(URL) as response: - print(response) - - - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) - -Motivation ----------- - -* Here is 100% backward capability -* Related aiohttp `issue `_ -* Related Python `bug `_ -* Related Python `pull request `_ -* It is fixed (by January 27 2018) in upcoming Python 3.7, but `IDNA2008 `_ is still broken - -Thanks ------- - -The library was donated by `Ocean S.A. `_ - -Thanks to the company for contribution. diff --git a/third_party/python/idna-ssl/idna_ssl.py b/third_party/python/idna-ssl/idna_ssl.py deleted file mode 100644 index 59db9bd469c6..000000000000 --- a/third_party/python/idna-ssl/idna_ssl.py +++ /dev/null @@ -1,40 +0,0 @@ -import ssl -import sys - -import idna - -__version__ = '1.1.0' - -real_match_hostname = ssl.match_hostname - -PY_370 = sys.version_info >= (3, 7, 0) - - -def patched_match_hostname(cert, hostname): - try: - hostname = idna.encode(hostname, uts46=True).decode('ascii') - except UnicodeError: - hostname = hostname.encode('idna').decode('ascii') - - return real_match_hostname(cert, hostname) - - -def patch_match_hostname(): - if PY_370: - return - - if hasattr(ssl.match_hostname, 'patched'): - return - - ssl.match_hostname = patched_match_hostname - ssl.match_hostname.patched = True - - -def reset_match_hostname(): - if PY_370: - return - - if not hasattr(ssl.match_hostname, 'patched'): - return - - ssl.match_hostname = real_match_hostname diff --git a/third_party/python/idna-ssl/setup.cfg b/third_party/python/idna-ssl/setup.cfg deleted file mode 100644 index 3eba006ab93b..000000000000 --- a/third_party/python/idna-ssl/setup.cfg +++ /dev/null @@ -1,15 +0,0 @@ -[coverage:run] -branch = True -omit = site-packages - -[isort] -known_third_party = aiohttp -known_first_party = idna_ssl - -[tool:pytest] -addopts = -s --keep-duplicates --cache-clear --verbose --no-cov-on-fail --cov=idna_ssl --cov-report=term --cov-report=html - -[egg_info] -tag_build = -tag_date = 0 - diff --git a/third_party/python/idna-ssl/setup.py b/third_party/python/idna-ssl/setup.py deleted file mode 100644 index da180db2c5b3..000000000000 --- a/third_party/python/idna-ssl/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -import io -import os -import re -import sys - -from setuptools import setup - -needs_pytest = 'pytest' in set(sys.argv) - - -def get_version(): - regex = r"__version__\s=\s\'(?P[\d\.ab]+?)\'" - - path = ('idna_ssl.py',) - - return re.search(regex, read(*path)).group('version') - - -def read(*parts): - filename = os.path.join(os.path.abspath(os.path.dirname(__file__)), *parts) - - with io.open(filename, encoding='utf-8', mode='rt') as fp: - return fp.read() - - -setup( - name='idna-ssl', - version=get_version(), - author='Victor Kovtun', - author_email='hellysmile@gmail.com', - url='https://github.com/aio-libs/idna-ssl', - description='Patch ssl.match_hostname for Unicode(idna) domains support', - long_description=read('README.rst'), - setup_requires=['pytest-runner'] if needs_pytest else [], - tests_require=['pytest', 'pytest-asyncio', 'pytest-cov', 'aiohttp>2.3'], - py_modules=['idna_ssl'], - install_requires=['idna>=2.0'], - include_package_data=True, - zip_safe=False, - classifiers=[ - 'Development Status :: 4 - Beta', - 
'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - ], - keywords=['ssl', 'Unicode', 'idna', 'match_hostname'], -) diff --git a/third_party/python/multidict/CHANGES.rst b/third_party/python/multidict/CHANGES.rst deleted file mode 100644 index c10b8c297a96..000000000000 --- a/third_party/python/multidict/CHANGES.rst +++ /dev/null @@ -1,255 +0,0 @@ -========= -Changelog -========= - -.. - You should *NOT* be adding new change log entries to this file, this - file is managed by towncrier. You *may* edit previous change logs to - fix problems like typo corrections or such. - To add a new change log entry, please see - https://pip.pypa.io/en/latest/development/#adding-a-news-entry - we named the news folder "changes". - - WARNING: Don't drop the next directive! - -.. towncrier release notes start - -5.1.0 (2020-12-03) -================== - -Features --------- - -- Support ``GenericAliases`` (``MultiDict[str]``) for Python 3.9+ - `#553 `_ - - -Bugfixes --------- - -- Synchronize the declared supported Python versions in ``setup.py`` with actually supported and tested ones. - `#552 `_ - - ----- - - -5.0.1 (2020-11-14) -================== - -Bugfixes --------- - -- Provide x86 Windows wheels - `#550 `_ - - ----- - - -5.0.0 (2020-10-12) -================== - -Features --------- - -- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on Linux - as well as ``x86_64``. - `#500 `_ -- Provide wheels for Python 3.9. - `#534 `_ - -Removal -------- - -- Drop Python 3.5 support; Python 3.6 is the minimal supported Python version. - -Misc ----- - -- `#503 `_ - - ----- - - -4.7.6 (2020-05-15) -================== - -Bugfixes --------- - -- Fixed an issue with some versions of the ``wheel`` dist - failing because of being unable to detect the license file. - `#481 `_ - - ----- - - -4.7.5 (2020-02-21) -================== - -Bugfixes --------- - -- Fixed creating and updating of MultiDict from a sequence of pairs and keyword - arguments. Previously passing a list argument modified it inplace, and other sequences - caused an error. - `#457 `_ -- Fixed comparing with mapping: an exception raised in the - :py:func:`~object.__len__` method caused raising a SyntaxError. - `#459 `_ -- Fixed comparing with mapping: all exceptions raised in the - :py:func:`~object.__getitem__` method were silenced. - `#460 `_ - - ----- - - -4.7.4 (2020-01-11) -================== - -Bugfixes --------- - -- ``MultiDict.iter`` fix memory leak when used iterator over - :py:mod:`multidict` instance. - `#452 `_ - - ----- - - -4.7.3 (2019-12-30) -================== - -Features --------- - -- Implement ``__sizeof__`` function to correctly calculate all internal structures size. - `#444 `_ -- Expose ``getversion()`` function. - `#451 `_ - - -Bugfixes --------- - -- Fix crashes in ``popone``/``popall`` when default is returned. 
- `#450 `_ - - -Improved Documentation ----------------------- - -- Corrected the documentation for ``MultiDict.extend()`` - `#446 `_ - - ----- - - -4.7.2 (2019-12-20) -================== - -Bugfixes --------- - -- Fix crashing when multidict is used pyinstaller - `#432 `_ -- Fix typing for :py:meth:`CIMultiDict.copy` - `#434 `_ -- Fix memory leak in ``MultiDict.copy()`` - `#443 `_ - - ----- - - -4.7.1 (2019-12-12) -================== - -Bugfixes --------- - -- :py:meth:`CIMultiDictProxy.copy` return object type - :py:class:`multidict._multidict.CIMultiDict` - `#427 `_ -- Make :py:class:`CIMultiDict` subclassable again - `#416 `_ -- Fix regression, multidict can be constructed from arbitrary iterable of pairs again. - `#418 `_ -- :py:meth:`CIMultiDict.add` may be called with keyword arguments - `#421 `_ - - -Improved Documentation ----------------------- - -- Mention ``MULTIDICT_NO_EXTENSIONS`` environment variable in docs. - `#393 `_ -- Document the fact that ``istr`` preserves the casing of argument untouched but uses internal lower-cased copy for keys comparison. - `#419 `_ - - ----- - - -4.7.0 (2019-12-10) -================== - -Features --------- - -- Replace Cython optimization with pure C - `#249 `_ -- Implement ``__length_hint__()`` for iterators - `#310 `_ -- Support the MultiDict[str] generic specialization in the runtime. - `#392 `_ -- Embed pair_list_t structure into MultiDict Python object - `#395 `_ -- Embed multidict pairs for small dictionaries to amortize the memory usage. - `#396 `_ -- Support weak references to C Extension classes. - `#399 `_ -- Add docstrings to provided classes. - `#400 `_ -- Merge ``multidict._istr`` back with ``multidict._multidict``. - `#409 `_ - - -Bugfixes --------- - -- Explicitly call ``tp_free`` slot on deallocation. - `#407 `_ -- Return class from __class_getitem__ to simplify subclassing - `#413 `_ - - ----- - - -4.6.1 (2019-11-21) -==================== - -Bugfixes --------- - -- Fix PyPI link for GitHub Issues badge. - `#391 `_ - -4.6.0 (2019-11-20) -==================== - -Bugfixes --------- - -- Fix GC object tracking. - `#314 `_ -- Preserve the case of `istr` strings. - `#374 `_ -- Generate binary wheels for Python 3.8. diff --git a/third_party/python/multidict/LICENSE b/third_party/python/multidict/LICENSE deleted file mode 100644 index 99a9e21af0d1..000000000000 --- a/third_party/python/multidict/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2016-2017 Andrew Svetlov - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/third_party/python/multidict/MANIFEST.in b/third_party/python/multidict/MANIFEST.in deleted file mode 100644 index c39a12f0b680..000000000000 --- a/third_party/python/multidict/MANIFEST.in +++ /dev/null @@ -1,14 +0,0 @@ -include LICENSE -include CHANGES.rst -include README.rst -include Makefile -graft multidict -graft docs -graft tests -global-exclude *.pyc -include multidict/*.c -exclude multidict/_multidict.html -exclude multidict/*.so -exclude multidict/*.pyd -exclude multidict/*.pyd -prune docs/_build diff --git a/third_party/python/multidict/Makefile b/third_party/python/multidict/Makefile deleted file mode 100644 index ca0562b04350..000000000000 --- a/third_party/python/multidict/Makefile +++ /dev/null @@ -1,108 +0,0 @@ -# Some simple testing tasks (sorry, UNIX only). -.PHONY: all build flake test vtest cov clean doc mypy - - -PYXS = $(wildcard multidict/*.pyx) -SRC = multidict tests setup.py - -all: test - -.install-deps: $(shell find requirements -type f) - pip install -r requirements/dev.txt - @touch .install-deps - -.flake: .install-deps $(shell find multidict -type f) \ - $(shell find tests -type f) - flake8 multidict tests - @if ! isort --check multidict tests; then \ - echo "Import sort errors, run 'make fmt' to fix them!!!"; \ - isort --diff --check multidict tests; \ - false; \ - fi - @touch .flake - - -isort-check: - @if ! isort --check $(SRC); then \ - echo "Import sort errors, run 'make fmt' to fix them!!!"; \ - isort --diff --check $(SRC); \ - false; \ - fi - -flake8: - flake8 $(SRC) - -black-check: - @if ! isort --check $(SRC); then \ - echo "black errors, run 'make fmt' to fix them!!!"; \ - black -t py35 --diff --check $(SRC); \ - false; \ - fi - -mypy: - mypy --show-error-codes multidict tests - -lint: flake8 black-check mypy isort-check check_changes - -fmt: - black -t py35 $(SRC) - isort $(SRC) - -check_changes: - ./tools/check_changes.py - -.develop: .install-deps $(shell find multidict -type f) .flake check_changes mypy - pip install -e . - @touch .develop - -test: .develop - @pytest -q - -vtest: .develop - @pytest -s -v - -cov-dev: .develop - @pytest --cov-report=html - @echo "open file://`pwd`/htmlcov/index.html" - -cov-ci-run: .develop - @echo "Regular run" - @pytest --cov-report=html - -cov-dev-full: cov-ci-run - @echo "open file://`pwd`/htmlcov/index.html" - -doc: - @make -C docs html SPHINXOPTS="-W -E" - @echo "open file://`pwd`/docs/_build/html/index.html" - -doc-spelling: - @make -C docs spelling SPHINXOPTS="-W -E" - -install: - @pip install -U 'pip' - @pip install -Ur requirements/dev.txt - -install-dev: .develop - - -clean: - rm -rf `find . -name __pycache__` - rm -f `find . -type f -name '*.py[co]' ` - rm -f `find . -type f -name '*~' ` - rm -f `find . -type f -name '.*~' ` - rm -f `find . -type f -name '@*' ` - rm -f `find . -type f -name '#*#' ` - rm -f `find . -type f -name '*.orig' ` - rm -f `find . 
-type f -name '*.rej' ` - rm -f .coverage - rm -rf coverage - rm -rf build - rm -rf cover - rm -rf htmlcov - make -C docs clean SPHINXBUILD=false - python3 setup.py clean - rm -f multidict/*.html - rm -f multidict/*.so - rm -f multidict/*.pyd - rm -rf .tox diff --git a/third_party/python/multidict/PKG-INFO b/third_party/python/multidict/PKG-INFO deleted file mode 100644 index bbd48649479a..000000000000 --- a/third_party/python/multidict/PKG-INFO +++ /dev/null @@ -1,128 +0,0 @@ -Metadata-Version: 1.2 -Name: multidict -Version: 5.1.0 -Summary: multidict implementation -Home-page: https://github.com/aio-libs/multidict -Author: Andrew Svetlov -Author-email: andrew.svetlov@gmail.com -License: Apache 2 -Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby -Project-URL: CI: Azure Pipelines, https://dev.azure.com/aio-libs/multidict/_build -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict -Project-URL: Docs: RTD, https://multidict.readthedocs.io -Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/multidict -Description: ========= - multidict - ========= - - .. image:: https://github.com/aio-libs/multidict/workflows/CI/badge.svg - :target: https://github.com/aio-libs/multidict/actions?query=workflow%3ACI - :alt: GitHub status for master branch - - .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/multidict - :alt: Coverage metrics - - .. image:: https://img.shields.io/pypi/v/multidict.svg - :target: https://pypi.org/project/multidict - :alt: PyPI - - .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest - :target: http://multidict.readthedocs.org/en/latest/?badge=latest - :alt: Documentationb - - .. image:: https://img.shields.io/pypi/pyversions/multidict.svg - :target: https://pypi.org/project/multidict - :alt: Python versions - - .. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - - Multidict is dict-like collection of *key-value pairs* where key - might be occurred more than once in the container. - - Introduction - ------------ - - *HTTP Headers* and *URL query string* require specific data structure: - *multidict*. It behaves mostly like a regular ``dict`` but it may have - several *values* for the same *key* and *preserves insertion ordering*. - - The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). - - ``multidict`` has four multidict classes: - ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` - and ``CIMultiDictProxy``. - - Immutable proxies (``MultiDictProxy`` and - ``CIMultiDictProxy``) provide a dynamic view for the - proxied multidict, the view reflects underlying collection changes. They - implement the ``collections.abc.Mapping`` interface. - - Regular mutable (``MultiDict`` and ``CIMultiDict``) classes - implement ``collections.abc.MutableMapping`` and allows to change - their own content. - - - *Case insensitive* (``CIMultiDict`` and - ``CIMultiDictProxy``) ones assume the *keys* are case - insensitive, e.g.:: - - >>> dct = CIMultiDict(key='val') - >>> 'Key' in dct - True - >>> dct['Key'] - 'val' - - *Keys* should be ``str`` or ``istr`` instances. - - The library has optional C Extensions for sake of speed. - - - License - ------- - - Apache 2 - - Library Installation - -------------------- - - .. code-block:: bash - - $ pip install multidict - - The library is Python 3 only! 
- - PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install - ``multidict`` on another operation system (or *Alpine Linux* inside a Docker) the - Tarball will be used to compile the library from sources. It requires C compiler and - Python headers installed. - - To skip the compilation please use `MULTIDICT_NO_EXTENSIONS` environment variable, - e.g.: - - .. code-block:: bash - - $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict - - Please note, Pure Python (uncompiled) version is about 20-50 times slower depending on - the usage scenario!!! - - - - Changelog - --------- - See `RTD page `_. -Platform: UNKNOWN -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Development Status :: 5 - Production/Stable -Requires-Python: >=3.6 diff --git a/third_party/python/multidict/README.rst b/third_party/python/multidict/README.rst deleted file mode 100644 index e78e5065c25c..000000000000 --- a/third_party/python/multidict/README.rst +++ /dev/null @@ -1,103 +0,0 @@ -========= -multidict -========= - -.. image:: https://github.com/aio-libs/multidict/workflows/CI/badge.svg - :target: https://github.com/aio-libs/multidict/actions?query=workflow%3ACI - :alt: GitHub status for master branch - -.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/multidict - :alt: Coverage metrics - -.. image:: https://img.shields.io/pypi/v/multidict.svg - :target: https://pypi.org/project/multidict - :alt: PyPI - -.. image:: https://readthedocs.org/projects/multidict/badge/?version=latest - :target: http://multidict.readthedocs.org/en/latest/?badge=latest - :alt: Documentationb - -.. image:: https://img.shields.io/pypi/pyversions/multidict.svg - :target: https://pypi.org/project/multidict - :alt: Python versions - -.. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - -Multidict is dict-like collection of *key-value pairs* where key -might be occurred more than once in the container. - -Introduction ------------- - -*HTTP Headers* and *URL query string* require specific data structure: -*multidict*. It behaves mostly like a regular ``dict`` but it may have -several *values* for the same *key* and *preserves insertion ordering*. - -The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). - -``multidict`` has four multidict classes: -``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` -and ``CIMultiDictProxy``. - -Immutable proxies (``MultiDictProxy`` and -``CIMultiDictProxy``) provide a dynamic view for the -proxied multidict, the view reflects underlying collection changes. They -implement the ``collections.abc.Mapping`` interface. - -Regular mutable (``MultiDict`` and ``CIMultiDict``) classes -implement ``collections.abc.MutableMapping`` and allows to change -their own content. - - -*Case insensitive* (``CIMultiDict`` and -``CIMultiDictProxy``) ones assume the *keys* are case -insensitive, e.g.:: - - >>> dct = CIMultiDict(key='val') - >>> 'Key' in dct - True - >>> dct['Key'] - 'val' - -*Keys* should be ``str`` or ``istr`` instances. 
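For reference, the API that the removed README text above describes can be exercised as in the short sketch below. This is illustrative only and not part of the patch; it assumes multidict 5.x installed from PyPI, and uses only the names documented above (``MultiDict``, ``CIMultiDict``, ``add``, ``getall``, ``getone``).

.. code-block:: python

    from multidict import CIMultiDict, MultiDict

    md = MultiDict([("key", "one")])
    md.add("key", "two")                       # duplicate keys are kept; insertion order is preserved
    assert md["key"] == "one"                  # plain lookup returns the first value
    assert md.getall("key") == ["one", "two"]  # all values for the key, in order
    assert md.getone("missing", "default") == "default"

    cid = CIMultiDict(Key="val")               # case-insensitive variant
    assert "kEy" in cid and cid["KEY"] == "val"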
- -The library has optional C Extensions for sake of speed. - - -License -------- - -Apache 2 - -Library Installation --------------------- - -.. code-block:: bash - - $ pip install multidict - -The library is Python 3 only! - -PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install -``multidict`` on another operation system (or *Alpine Linux* inside a Docker) the -Tarball will be used to compile the library from sources. It requires C compiler and -Python headers installed. - -To skip the compilation please use `MULTIDICT_NO_EXTENSIONS` environment variable, -e.g.: - -.. code-block:: bash - - $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict - -Please note, Pure Python (uncompiled) version is about 20-50 times slower depending on -the usage scenario!!! - - - -Changelog ---------- -See `RTD page `_. diff --git a/third_party/python/multidict/multidict/__init__.py b/third_party/python/multidict/multidict/__init__.py deleted file mode 100644 index 6b091d143165..000000000000 --- a/third_party/python/multidict/multidict/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Multidict implementation. - -HTTP Headers and URL query string require specific data structure: -multidict. It behaves mostly like a dict but it can have -several values for the same key. -""" - -from ._abc import MultiMapping, MutableMultiMapping -from ._compat import USE_CYTHON_EXTENSIONS - -__all__ = ( - "MultiMapping", - "MutableMultiMapping", - "MultiDictProxy", - "CIMultiDictProxy", - "MultiDict", - "CIMultiDict", - "upstr", - "istr", - "getversion", -) - -__version__ = "5.1.0" - - -try: - if not USE_CYTHON_EXTENSIONS: - raise ImportError - from ._multidict import ( - CIMultiDict, - CIMultiDictProxy, - MultiDict, - MultiDictProxy, - getversion, - istr, - ) -except ImportError: # pragma: no cover - from ._multidict_py import ( - CIMultiDict, - CIMultiDictProxy, - MultiDict, - MultiDictProxy, - getversion, - istr, - ) - - -upstr = istr diff --git a/third_party/python/multidict/multidict/__init__.pyi b/third_party/python/multidict/multidict/__init__.pyi deleted file mode 100644 index 24ba63054bcf..000000000000 --- a/third_party/python/multidict/multidict/__init__.pyi +++ /dev/null @@ -1,152 +0,0 @@ -import abc -from typing import ( - Dict, - Generic, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Tuple, - TypeVar, - Union, - overload, -) - -class istr(str): ... - -upstr = istr - -_S = Union[str, istr] - -_T = TypeVar("_T") - -_T_co = TypeVar("_T_co", covariant=True) - -_D = TypeVar("_D") - -class MultiMapping(Mapping[_S, _T_co]): - @overload - @abc.abstractmethod - def getall(self, key: _S) -> List[_T_co]: ... - @overload - @abc.abstractmethod - def getall(self, key: _S, default: _D) -> Union[List[_T_co], _D]: ... - @overload - @abc.abstractmethod - def getone(self, key: _S) -> _T_co: ... - @overload - @abc.abstractmethod - def getone(self, key: _S, default: _D) -> Union[_T_co, _D]: ... - -_Arg = Union[Mapping[_S, _T], Dict[_S, _T], MultiMapping[_T], Iterable[Tuple[_S, _T]]] - -class MutableMultiMapping(MultiMapping[_T], MutableMapping[_S, _T], Generic[_T]): - @abc.abstractmethod - def add(self, key: _S, value: _T) -> None: ... - @abc.abstractmethod - def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ... - @overload - @abc.abstractmethod - def popone(self, key: _S) -> _T: ... - @overload - @abc.abstractmethod - def popone(self, key: _S, default: _D) -> Union[_T, _D]: ... - @overload - @abc.abstractmethod - def popall(self, key: _S) -> List[_T]: ... 
- @overload - @abc.abstractmethod - def popall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - -class MultiDict(MutableMultiMapping[_T], Generic[_T]): - def __init__(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ... - def copy(self) -> MultiDict[_T]: ... - def __getitem__(self, k: _S) -> _T: ... - def __setitem__(self, k: _S, v: _T) -> None: ... - def __delitem__(self, v: _S) -> None: ... - def __iter__(self) -> Iterator[_S]: ... - def __len__(self) -> int: ... - @overload - def getall(self, key: _S) -> List[_T]: ... - @overload - def getall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - @overload - def getone(self, key: _S) -> _T: ... - @overload - def getone(self, key: _S, default: _D) -> Union[_T, _D]: ... - def add(self, key: _S, value: _T) -> None: ... - def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ... - @overload - def popone(self, key: _S) -> _T: ... - @overload - def popone(self, key: _S, default: _D) -> Union[_T, _D]: ... - @overload - def popall(self, key: _S) -> List[_T]: ... - @overload - def popall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - -class CIMultiDict(MutableMultiMapping[_T], Generic[_T]): - def __init__(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ... - def copy(self) -> CIMultiDict[_T]: ... - def __getitem__(self, k: _S) -> _T: ... - def __setitem__(self, k: _S, v: _T) -> None: ... - def __delitem__(self, v: _S) -> None: ... - def __iter__(self) -> Iterator[_S]: ... - def __len__(self) -> int: ... - @overload - def getall(self, key: _S) -> List[_T]: ... - @overload - def getall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - @overload - def getone(self, key: _S) -> _T: ... - @overload - def getone(self, key: _S, default: _D) -> Union[_T, _D]: ... - def add(self, key: _S, value: _T) -> None: ... - def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ... - @overload - def popone(self, key: _S) -> _T: ... - @overload - def popone(self, key: _S, default: _D) -> Union[_T, _D]: ... - @overload - def popall(self, key: _S) -> List[_T]: ... - @overload - def popall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - -class MultiDictProxy(MultiMapping[_T], Generic[_T]): - def __init__( - self, arg: Union[MultiMapping[_T], MutableMultiMapping[_T]] - ) -> None: ... - def copy(self) -> MultiDict[_T]: ... - def __getitem__(self, k: _S) -> _T: ... - def __iter__(self) -> Iterator[_S]: ... - def __len__(self) -> int: ... - @overload - def getall(self, key: _S) -> List[_T]: ... - @overload - def getall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - @overload - def getone(self, key: _S) -> _T: ... - @overload - def getone(self, key: _S, default: _D) -> Union[_T, _D]: ... - -class CIMultiDictProxy(MultiMapping[_T], Generic[_T]): - def __init__( - self, arg: Union[MultiMapping[_T], MutableMultiMapping[_T]] - ) -> None: ... - def __getitem__(self, k: _S) -> _T: ... - def __iter__(self) -> Iterator[_S]: ... - def __len__(self) -> int: ... - @overload - def getall(self, key: _S) -> List[_T]: ... - @overload - def getall(self, key: _S, default: _D) -> Union[List[_T], _D]: ... - @overload - def getone(self, key: _S) -> _T: ... - @overload - def getone(self, key: _S, default: _D) -> Union[_T, _D]: ... - def copy(self) -> CIMultiDict[_T]: ... - -def getversion( - md: Union[MultiDict[_T], CIMultiDict[_T], MultiDictProxy[_T], CIMultiDictProxy[_T]] -) -> int: ... 
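For reference, the proxy classes and ``getversion()`` helper declared in the removed stub file above behave roughly as in the sketch below. This is illustrative only and not part of the patch; it assumes multidict 5.x from PyPI, and the exact version-counter semantics are an assumption based on the changelog entry exposing ``getversion()``.

.. code-block:: python

    from multidict import CIMultiDict, CIMultiDictProxy, getversion

    headers = CIMultiDict(Accept="text/html")
    proxy = CIMultiDictProxy(headers)          # read-only, dynamic view of the underlying dict

    before = getversion(headers)
    headers.add("Accept", "application/json")
    assert proxy.getall("accept") == ["text/html", "application/json"]  # view reflects the change
    assert getversion(headers) != before       # version counter changes when the dict mutates

    try:
        proxy["Accept"] = "other"              # proxies implement Mapping, not MutableMapping
    except TypeError:
        pass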
diff --git a/third_party/python/multidict/multidict/_abc.py b/third_party/python/multidict/multidict/_abc.py deleted file mode 100644 index 0603cdd2447e..000000000000 --- a/third_party/python/multidict/multidict/_abc.py +++ /dev/null @@ -1,48 +0,0 @@ -import abc -import sys -import types -from collections.abc import Mapping, MutableMapping - - -class _TypingMeta(abc.ABCMeta): - # A fake metaclass to satisfy typing deps in runtime - # basically MultiMapping[str] and other generic-like type instantiations - # are emulated. - # Note: real type hints are provided by __init__.pyi stub file - if sys.version_info >= (3, 9): - - def __getitem__(self, key): - return types.GenericAlias(self, key) - - else: - - def __getitem__(self, key): - return self - - -class MultiMapping(Mapping, metaclass=_TypingMeta): - @abc.abstractmethod - def getall(self, key, default=None): - raise KeyError - - @abc.abstractmethod - def getone(self, key, default=None): - raise KeyError - - -class MutableMultiMapping(MultiMapping, MutableMapping): - @abc.abstractmethod - def add(self, key, value): - raise NotImplementedError - - @abc.abstractmethod - def extend(self, *args, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def popone(self, key, default=None): - raise KeyError - - @abc.abstractmethod - def popall(self, key, default=None): - raise KeyError diff --git a/third_party/python/multidict/multidict/_compat.py b/third_party/python/multidict/multidict/_compat.py deleted file mode 100644 index e659124558a9..000000000000 --- a/third_party/python/multidict/multidict/_compat.py +++ /dev/null @@ -1,14 +0,0 @@ -import os -import platform - -NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS")) - -PYPY = platform.python_implementation() == "PyPy" - -USE_CYTHON_EXTENSIONS = USE_CYTHON = not NO_EXTENSIONS and not PYPY - -if USE_CYTHON_EXTENSIONS: - try: - from . 
import _multidict # noqa - except ImportError: - USE_CYTHON_EXTENSIONS = USE_CYTHON = False diff --git a/third_party/python/multidict/multidict/_multidict.c b/third_party/python/multidict/multidict/_multidict.c deleted file mode 100644 index 5bdcc898de07..000000000000 --- a/third_party/python/multidict/multidict/_multidict.c +++ /dev/null @@ -1,1646 +0,0 @@ -#include "Python.h" -#include "structmember.h" - -// Include order important -#include "_multilib/defs.h" -#include "_multilib/istr.h" -#include "_multilib/pair_list.h" -#include "_multilib/dict.h" -#include "_multilib/iter.h" -#include "_multilib/views.h" - -static PyObject *collections_abc_mapping; -static PyObject *collections_abc_mut_mapping; -static PyObject *collections_abc_mut_multi_mapping; - -static PyTypeObject multidict_type; -static PyTypeObject cimultidict_type; -static PyTypeObject multidict_proxy_type; -static PyTypeObject cimultidict_proxy_type; - -static PyObject *repr_func; - -#define MultiDict_CheckExact(o) (Py_TYPE(o) == &multidict_type) -#define CIMultiDict_CheckExact(o) (Py_TYPE(o) == &cimultidict_type) -#define MultiDictProxy_CheckExact(o) (Py_TYPE(o) == &multidict_proxy_type) -#define CIMultiDictProxy_CheckExact(o) (Py_TYPE(o) == &cimultidict_proxy_type) - -/* Helper macro for something like isinstance(obj, Base) */ -#define _MultiDict_Check(o) \ - ((MultiDict_CheckExact(o)) || \ - (CIMultiDict_CheckExact(o)) || \ - (MultiDictProxy_CheckExact(o)) || \ - (CIMultiDictProxy_CheckExact(o))) - -/******************** Internal Methods ********************/ - -/* Forward declaration */ -static PyObject *multidict_items(MultiDictObject *self); - -static inline PyObject * -_multidict_getone(MultiDictObject *self, PyObject *key, PyObject *_default) -{ - PyObject *val = pair_list_get_one(&self->pairs, key); - - if (val == NULL && - PyErr_ExceptionMatches(PyExc_KeyError) && - _default != NULL) - { - PyErr_Clear(); - Py_INCREF(_default); - return _default; - } - - return val; -} - -static inline int -_multidict_eq(MultiDictObject *self, MultiDictObject *other) -{ - Py_ssize_t pos1 = 0, - pos2 = 0; - - Py_hash_t h1 = 0, - h2 = 0; - - PyObject *identity1 = NULL, - *identity2 = NULL, - *value1 = NULL, - *value2 = NULL; - - int cmp_identity = 0, - cmp_value = 0; - - if (self == other) { - return 1; - } - - if (pair_list_len(&self->pairs) != pair_list_len(&other->pairs)) { - return 0; - } - - while (_pair_list_next(&self->pairs, &pos1, &identity1, NULL, &value1, &h1) && - _pair_list_next(&other->pairs, &pos2, &identity2, NULL, &value2, &h2)) - { - if (h1 != h2) { - return 0; - } - cmp_identity = PyObject_RichCompareBool(identity1, identity2, Py_NE); - if (cmp_identity < 0) { - return -1; - } - cmp_value = PyObject_RichCompareBool(value1, value2, Py_NE); - if (cmp_value < 0) { - return -1; - } - if (cmp_identity || cmp_value) { - return 0; - } - } - - return 1; -} - -static inline int -_multidict_update_items(MultiDictObject *self, pair_list_t *pairs) -{ - return pair_list_update(&self->pairs, pairs); -} - -static inline int -_multidict_append_items(MultiDictObject *self, pair_list_t *pairs) -{ - PyObject *key = NULL, - *value = NULL; - - Py_ssize_t pos = 0; - - while (_pair_list_next(pairs, &pos, NULL, &key, &value, NULL)) { - if (pair_list_add(&self->pairs, key, value) < 0) { - return -1; - } - } - - return 0; -} - -static inline int -_multidict_append_items_seq(MultiDictObject *self, PyObject *arg, - const char *name) -{ - PyObject *key = NULL, - *value = NULL, - *item = NULL, - *iter = PyObject_GetIter(arg); - - if (iter == 
NULL) { - return -1; - } - - while ((item = PyIter_Next(iter)) != NULL) { - if (PyTuple_CheckExact(item)) { - if (PyTuple_GET_SIZE(item) != 2) { - goto invalid_type; - } - key = PyTuple_GET_ITEM(item, 0); - Py_INCREF(key); - value = PyTuple_GET_ITEM(item, 1); - Py_INCREF(value); - } - else if (PyList_CheckExact(item)) { - if (PyList_GET_SIZE(item) != 2) { - goto invalid_type; - } - key = PyList_GET_ITEM(item, 0); - Py_INCREF(key); - value = PyList_GET_ITEM(item, 1); - Py_INCREF(value); - } - else if (PySequence_Check(item)) { - if (PySequence_Size(item) != 2) { - goto invalid_type; - } - key = PySequence_GetItem(item, 0); - value = PySequence_GetItem(item, 1); - } else { - goto invalid_type; - } - - if (pair_list_add(&self->pairs, key, value) < 0) { - goto fail; - } - Py_CLEAR(key); - Py_CLEAR(value); - Py_CLEAR(item); - } - - Py_DECREF(iter); - - if (PyErr_Occurred()) { - return -1; - } - - return 0; -invalid_type: - PyErr_Format( - PyExc_TypeError, - "%s takes either dict or list of (key, value) pairs", - name, - NULL - ); - goto fail; -fail: - Py_XDECREF(key); - Py_XDECREF(value); - Py_XDECREF(item); - Py_DECREF(iter); - return -1; -} - -static inline int -_multidict_list_extend(PyObject *list, PyObject *target_list) -{ - PyObject *item = NULL, - *iter = PyObject_GetIter(target_list); - - if (iter == NULL) { - return -1; - } - - while ((item = PyIter_Next(iter)) != NULL) { - if (PyList_Append(list, item) < 0) { - Py_DECREF(item); - Py_DECREF(iter); - return -1; - } - Py_DECREF(item); - } - - Py_DECREF(iter); - - if (PyErr_Occurred()) { - return -1; - } - - return 0; -} - -static inline int -_multidict_extend_with_args(MultiDictObject *self, PyObject *arg, - PyObject *kwds, const char *name, int do_add) -{ - PyObject *arg_items = NULL, /* tracked by GC */ - *kwds_items = NULL; /* new reference */ - pair_list_t *pairs = NULL; - - int err = 0; - - if (kwds && !PyArg_ValidateKeywordArguments(kwds)) { - return -1; - } - - // TODO: mb can be refactored more clear - if (_MultiDict_Check(arg) && kwds == NULL) { - if (MultiDict_CheckExact(arg) || CIMultiDict_CheckExact(arg)) { - pairs = &((MultiDictObject*)arg)->pairs; - } else if (MultiDictProxy_CheckExact(arg) || CIMultiDictProxy_CheckExact(arg)) { - pairs = &((MultiDictProxyObject*)arg)->md->pairs; - } - - if (do_add) { - return _multidict_append_items(self, pairs); - } - - return _multidict_update_items(self, pairs); - } - - if (PyObject_HasAttrString(arg, "items")) { - if (_MultiDict_Check(arg)) { - arg_items = multidict_items((MultiDictObject*)arg); - } else { - arg_items = PyMapping_Items(arg); - } - if (arg_items == NULL) { - return -1; - } - } else { - arg_items = arg; - Py_INCREF(arg_items); - } - - if (kwds) { - PyObject *tmp = PySequence_List(arg_items); - Py_DECREF(arg_items); - arg_items = tmp; - if (arg_items == NULL) { - return -1; - } - - kwds_items = PyDict_Items(kwds); - if (kwds_items == NULL) { - Py_DECREF(arg_items); - return -1; - } - err = _multidict_list_extend(arg_items, kwds_items); - Py_DECREF(kwds_items); - if (err < 0) { - Py_DECREF(arg_items); - return -1; - } - } - - if (do_add) { - err = _multidict_append_items_seq(self, arg_items, name); - } else { - err = pair_list_update_from_seq(&self->pairs, arg_items); - } - - Py_DECREF(arg_items); - - return err; -} - -static inline int -_multidict_extend_with_kwds(MultiDictObject *self, PyObject *kwds, - const char *name, int do_add) -{ - PyObject *arg = NULL; - - int err = 0; - - if (!PyArg_ValidateKeywordArguments(kwds)) { - return -1; - } - - arg = PyDict_Items(kwds); 
- if (do_add) { - err = _multidict_append_items_seq(self, arg, name); - } else { - err = pair_list_update_from_seq(&self->pairs, arg); - } - - Py_DECREF(arg); - return err; -} - -static inline int -_multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds, - const char *name, int do_add) -{ - PyObject *arg = NULL; - - if (args && PyObject_Length(args) > 1) { - PyErr_Format( - PyExc_TypeError, - "%s takes at most 1 positional argument (%zd given)", - name, PyObject_Length(args), NULL - ); - return -1; - } - - if (args && PyObject_Length(args) > 0) { - if (!PyArg_UnpackTuple(args, name, 0, 1, &arg)) { - return -1; - } - if (_multidict_extend_with_args(self, arg, kwds, name, do_add) < 0) { - return -1; - } - } else if (kwds && PyObject_Length(kwds) > 0) { - if (_multidict_extend_with_kwds(self, kwds, name, do_add) < 0) { - return -1; - } - } - - return 0; -} - -static inline PyObject * -_multidict_copy(MultiDictObject *self, PyTypeObject *multidict_tp_object) -{ - MultiDictObject *new_multidict = NULL; - - PyObject *arg_items = NULL, - *items = NULL; - - new_multidict = (MultiDictObject*)PyType_GenericNew( - multidict_tp_object, NULL, NULL); - if (new_multidict == NULL) { - return NULL; - } - - if (multidict_tp_object->tp_init( - (PyObject*)new_multidict, NULL, NULL) < 0) - { - return NULL; - } - - items = multidict_items(self); - if (items == NULL) { - goto fail; - } - - // TODO: "Implementation looks as slow as possible ..." - arg_items = PyTuple_New(1); - if (arg_items == NULL) { - goto fail; - } - - Py_INCREF(items); - PyTuple_SET_ITEM(arg_items, 0, items); - - if (_multidict_extend( - new_multidict, arg_items, NULL, "copy", 1) < 0) - { - goto fail; - } - - Py_DECREF(items); - Py_DECREF(arg_items); - - return (PyObject*)new_multidict; - -fail: - Py_XDECREF(items); - Py_XDECREF(arg_items); - - Py_DECREF(new_multidict); - - return NULL; -} - -static inline PyObject * -_multidict_proxy_copy(MultiDictProxyObject *self, PyTypeObject *type) -{ - PyObject *new_multidict = PyType_GenericNew(type, NULL, NULL); - if (new_multidict == NULL) { - goto fail; - } - if (type->tp_init(new_multidict, NULL, NULL) < 0) { - goto fail; - } - if (_multidict_extend_with_args( - (MultiDictObject*)new_multidict, (PyObject*)self, NULL, "copy", 1) < 0) - { - goto fail; - } - - return new_multidict; - -fail: - Py_XDECREF(new_multidict); - return NULL; -} - - -/******************** Base Methods ********************/ - -static inline PyObject * -multidict_getall(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *list = NULL, - *key = NULL, - *_default = NULL; - - static char *getall_keywords[] = {"key", "default", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:getall", - getall_keywords, &key, &_default)) - { - return NULL; - } - - list = pair_list_get_all(&self->pairs, key); - - if (list == NULL && - PyErr_ExceptionMatches(PyExc_KeyError) && - _default != NULL) - { - PyErr_Clear(); - Py_INCREF(_default); - return _default; - } - - return list; -} - -static inline PyObject * -multidict_getone(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *key = NULL, - *_default = NULL; - - static char *getone_keywords[] = {"key", "default", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:getone", - getone_keywords, &key, &_default)) - { - return NULL; - } - - return _multidict_getone(self, key, _default); -} - -static inline PyObject * -multidict_get(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *key = NULL, - *_default = Py_None, 
- *ret; - - static char *getone_keywords[] = {"key", "default", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:getone", - getone_keywords, &key, &_default)) - { - return NULL; - } - ret = _multidict_getone(self, key, _default); - return ret; -} - -static inline PyObject * -multidict_keys(MultiDictObject *self) -{ - return multidict_keysview_new((PyObject*)self); -} - -static inline PyObject * -multidict_items(MultiDictObject *self) -{ - return multidict_itemsview_new((PyObject*)self); -} - -static inline PyObject * -multidict_values(MultiDictObject *self) -{ - return multidict_valuesview_new((PyObject*)self); -} - -static inline PyObject * -multidict_reduce(MultiDictObject *self) -{ - PyObject *items = NULL, - *items_list = NULL, - *args = NULL, - *result = NULL; - - items = multidict_items(self); - if (items == NULL) { - goto ret; - } - - items_list = PySequence_List(items); - if (items_list == NULL) { - goto ret; - } - - args = PyTuple_Pack(1, items_list); - if (args == NULL) { - goto ret; - } - - result = PyTuple_Pack(2, Py_TYPE(self), args); - -ret: - Py_XDECREF(args); - Py_XDECREF(items_list); - Py_XDECREF(items); - - return result; -} - -static inline PyObject * -multidict_repr(PyObject *self) -{ - return PyObject_CallFunctionObjArgs( - repr_func, self, NULL); -} - -static inline Py_ssize_t -multidict_mp_len(MultiDictObject *self) -{ - return pair_list_len(&self->pairs); -} - -static inline PyObject * -multidict_mp_subscript(MultiDictObject *self, PyObject *key) -{ - return _multidict_getone(self, key, NULL); -} - -static inline int -multidict_mp_as_subscript(MultiDictObject *self, PyObject *key, PyObject *val) -{ - if (val == NULL) { - return pair_list_del(&self->pairs, key); - } else { - return pair_list_replace(&self->pairs, key, val); - } -} - -static inline int -multidict_sq_contains(MultiDictObject *self, PyObject *key) -{ - return pair_list_contains(&self->pairs, key); -} - -static inline PyObject * -multidict_tp_iter(MultiDictObject *self) -{ - return multidict_keys_iter_new(self); -} - -static inline PyObject * -multidict_tp_richcompare(PyObject *self, PyObject *other, int op) -{ - // TODO: refactoring me with love - - int cmp = 0; - - if (op != Py_EQ && op != Py_NE) { - Py_RETURN_NOTIMPLEMENTED; - } - - if (MultiDict_CheckExact(other) || CIMultiDict_CheckExact(other)) { - cmp = _multidict_eq( - (MultiDictObject*)self, - (MultiDictObject*)other - ); - if (cmp < 0) { - return NULL; - } - if (op == Py_NE) { - cmp = !cmp; - } - return PyBool_FromLong(cmp); - } - - if (MultiDictProxy_CheckExact(other) || CIMultiDictProxy_CheckExact(other)) { - cmp = _multidict_eq( - (MultiDictObject*)self, - ((MultiDictProxyObject*)other)->md - ); - if (cmp < 0) { - return NULL; - } - if (op == Py_NE) { - cmp = !cmp; - } - return PyBool_FromLong(cmp); - } - - cmp = PyObject_IsInstance(other, (PyObject*)collections_abc_mapping); - if (cmp < 0) { - return NULL; - } - - if (cmp) { - cmp = pair_list_eq_to_mapping(&((MultiDictObject*)self)->pairs, other); - if (cmp < 0) { - return NULL; - } - if (op == Py_NE) { - cmp = !cmp; - } - return PyBool_FromLong(cmp); - } - - Py_RETURN_NOTIMPLEMENTED; -} - -static inline void -multidict_tp_dealloc(MultiDictObject *self) -{ - PyObject_GC_UnTrack(self); - Py_TRASHCAN_SAFE_BEGIN(self); - if (self->weaklist != NULL) { - PyObject_ClearWeakRefs((PyObject *)self); - }; - pair_list_dealloc(&self->pairs); - Py_TYPE(self)->tp_free((PyObject *)self); - Py_TRASHCAN_SAFE_END(self); -} - -static inline int -multidict_tp_traverse(MultiDictObject *self, 
visitproc visit, void *arg) -{ - return pair_list_traverse(&self->pairs, visit, arg); -} - -static inline int -multidict_tp_clear(MultiDictObject *self) -{ - return pair_list_clear(&self->pairs); -} - -PyDoc_STRVAR(multidict_getall_doc, -"Return a list of all values matching the key."); - -PyDoc_STRVAR(multidict_getone_doc, -"Get first value matching the key."); - -PyDoc_STRVAR(multidict_get_doc, -"Get first value matching the key.\n\nThe method is alias for .getone()."); - -PyDoc_STRVAR(multidict_keys_doc, -"Return a new view of the dictionary's keys."); - -PyDoc_STRVAR(multidict_items_doc, -"Return a new view of the dictionary's items *(key, value) pairs)."); - -PyDoc_STRVAR(multidict_values_doc, -"Return a new view of the dictionary's values."); - -/******************** MultiDict ********************/ - -static inline int -multidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - if (pair_list_init(&self->pairs) < 0) { - return -1; - } - if (_multidict_extend(self, args, kwds, "MultiDict", 1) < 0) { - return -1; - } - return 0; -} - -static inline PyObject * -multidict_add(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *key = NULL, - *val = NULL; - - static char *kwlist[] = {"key", "value", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO:add", - kwlist, &key, &val)) - { - return NULL; - } - - if (pair_list_add(&self->pairs, key, val) < 0) { - return NULL; - } - - Py_RETURN_NONE; -} - -static inline PyObject * -multidict_copy(MultiDictObject *self) -{ - return _multidict_copy(self, &multidict_type); -} - -static inline PyObject * -multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - if (_multidict_extend(self, args, kwds, "extend", 1) < 0) { - return NULL; - } - - Py_RETURN_NONE; -} - -static inline PyObject * -multidict_clear(MultiDictObject *self) -{ - if (pair_list_clear(&self->pairs) < 0) { - return NULL; - } - - Py_RETURN_NONE; -} - -static inline PyObject * -multidict_setdefault(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *key = NULL, - *_default = NULL; - - static char *setdefault_keywords[] = {"key", "default", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:setdefault", - setdefault_keywords, &key, &_default)) - { - return NULL; - } - return pair_list_set_default(&self->pairs, key, _default); -} - -static inline PyObject * -multidict_popone(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *key = NULL, - *_default = NULL, - *ret_val = NULL; - - static char *popone_keywords[] = {"key", "default", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:popone", - popone_keywords, &key, &_default)) - { - return NULL; - } - - ret_val = pair_list_pop_one(&self->pairs, key); - - if (ret_val == NULL && - PyErr_ExceptionMatches(PyExc_KeyError) && - _default != NULL) - { - PyErr_Clear(); - Py_INCREF(_default); - return _default; - } - - return ret_val; -} - -static inline PyObject * -multidict_popall(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *key = NULL, - *_default = NULL, - *ret_val = NULL; - - static char *popall_keywords[] = {"key", "default", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:popall", - popall_keywords, &key, &_default)) - { - return NULL; - } - - ret_val = pair_list_pop_all(&self->pairs, key); - - if (ret_val == NULL && - PyErr_ExceptionMatches(PyExc_KeyError) && - _default != NULL) - { - PyErr_Clear(); - Py_INCREF(_default); - return _default; - } - - return ret_val; -} - -static inline 
PyObject * -multidict_popitem(MultiDictObject *self) -{ - return pair_list_pop_item(&self->pairs); -} - -static inline PyObject * -multidict_update(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - if (_multidict_extend(self, args, kwds, "update", 0) < 0) { - return NULL; - } - Py_RETURN_NONE; -} - -PyDoc_STRVAR(multidict_add_doc, -"Add the key and value, not overwriting any previous value."); - -PyDoc_STRVAR(multidict_copy_doc, -"Return a copy of itself."); - -PyDoc_STRVAR(multdicit_method_extend_doc, -"Extend current MultiDict with more values.\n\ -This method must be used instead of update."); - -PyDoc_STRVAR(multidict_clear_doc, -"Remove all items from MultiDict"); - -PyDoc_STRVAR(multidict_setdefault_doc, -"Return value for key, set value to default if key is not present."); - -PyDoc_STRVAR(multidict_popone_doc, -"Remove the last occurrence of key and return the corresponding value.\n\n\ -If key is not found, default is returned if given, otherwise KeyError is \ -raised.\n"); - -PyDoc_STRVAR(multidict_popall_doc, -"Remove all occurrences of key and return the list of corresponding values.\n\n\ -If key is not found, default is returned if given, otherwise KeyError is \ -raised.\n"); - -PyDoc_STRVAR(multidict_popitem_doc, -"Remove and return an arbitrary (key, value) pair."); - -PyDoc_STRVAR(multidict_update_doc, -"Update the dictionary from *other*, overwriting existing keys."); - - -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9 -#define multidict_class_getitem Py_GenericAlias -#else -static inline PyObject * -multidict_class_getitem(PyObject *self, PyObject *arg) -{ - Py_INCREF(self); - return self; -} -#endif - - -PyDoc_STRVAR(sizeof__doc__, -"D.__sizeof__() -> size of D in memory, in bytes"); - -static inline PyObject * -_multidict_sizeof(MultiDictObject *self) -{ - Py_ssize_t size = sizeof(MultiDictObject); - if (self->pairs.pairs != self->pairs.buffer) { - size += (Py_ssize_t)sizeof(pair_t) * self->pairs.capacity; - } - return PyLong_FromSsize_t(size); -} - - -static PySequenceMethods multidict_sequence = { - .sq_contains = (objobjproc)multidict_sq_contains, -}; - -static PyMappingMethods multidict_mapping = { - .mp_length = (lenfunc)multidict_mp_len, - .mp_subscript = (binaryfunc)multidict_mp_subscript, - .mp_ass_subscript = (objobjargproc)multidict_mp_as_subscript, -}; - -static PyMethodDef multidict_methods[] = { - { - "getall", - (PyCFunction)multidict_getall, - METH_VARARGS | METH_KEYWORDS, - multidict_getall_doc - }, - { - "getone", - (PyCFunction)multidict_getone, - METH_VARARGS | METH_KEYWORDS, - multidict_getone_doc - }, - { - "get", - (PyCFunction)multidict_get, - METH_VARARGS | METH_KEYWORDS, - multidict_get_doc - }, - { - "keys", - (PyCFunction)multidict_keys, - METH_NOARGS, - multidict_keys_doc - }, - { - "items", - (PyCFunction)multidict_items, - METH_NOARGS, - multidict_items_doc - }, - { - "values", - (PyCFunction)multidict_values, - METH_NOARGS, - multidict_values_doc - }, - { - "add", - (PyCFunction)multidict_add, - METH_VARARGS | METH_KEYWORDS, - multidict_add_doc - }, - { - "copy", - (PyCFunction)multidict_copy, - METH_NOARGS, - multidict_copy_doc - }, - { - "extend", - (PyCFunction)multidict_extend, - METH_VARARGS | METH_KEYWORDS, - multdicit_method_extend_doc - }, - { - "clear", - (PyCFunction)multidict_clear, - METH_NOARGS, - multidict_clear_doc - }, - { - "setdefault", - (PyCFunction)multidict_setdefault, - METH_VARARGS | METH_KEYWORDS, - multidict_setdefault_doc - }, - { - "popone", - (PyCFunction)multidict_popone, - METH_VARARGS | 
METH_KEYWORDS, - multidict_popone_doc - }, - { - "pop", - (PyCFunction)multidict_popone, - METH_VARARGS | METH_KEYWORDS, - multidict_popone_doc - }, - { - "popall", - (PyCFunction)multidict_popall, - METH_VARARGS | METH_KEYWORDS, - multidict_popall_doc - }, - { - "popitem", - (PyCFunction)multidict_popitem, - METH_NOARGS, - multidict_popitem_doc - }, - { - "update", - (PyCFunction)multidict_update, - METH_VARARGS | METH_KEYWORDS, - multidict_update_doc - }, - { - "__reduce__", - (PyCFunction)multidict_reduce, - METH_NOARGS, - NULL, - }, - { - "__class_getitem__", - (PyCFunction)multidict_class_getitem, - METH_O | METH_CLASS, - NULL - }, - { - "__sizeof__", - (PyCFunction)_multidict_sizeof, - METH_NOARGS, - sizeof__doc__, - }, - { - NULL, - NULL - } /* sentinel */ -}; - - -PyDoc_STRVAR(MultDict_doc, -"Dictionary with the support for duplicate keys."); - - -static PyTypeObject multidict_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "multidict._multidict.MultiDict", /* tp_name */ - sizeof(MultiDictObject), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_tp_dealloc, - .tp_repr = (reprfunc)multidict_repr, - .tp_as_sequence = &multidict_sequence, - .tp_as_mapping = &multidict_mapping, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - .tp_doc = MultDict_doc, - .tp_traverse = (traverseproc)multidict_tp_traverse, - .tp_clear = (inquiry)multidict_tp_clear, - .tp_richcompare = (richcmpfunc)multidict_tp_richcompare, - .tp_weaklistoffset = offsetof(MultiDictObject, weaklist), - .tp_iter = (getiterfunc)multidict_tp_iter, - .tp_methods = multidict_methods, - .tp_init = (initproc)multidict_tp_init, - .tp_alloc = PyType_GenericAlloc, - .tp_new = PyType_GenericNew, - .tp_free = PyObject_GC_Del, -}; - -/******************** CIMultiDict ********************/ - -static inline int -cimultidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds) -{ - if (ci_pair_list_init(&self->pairs) < 0) { - return -1; - } - if (_multidict_extend(self, args, kwds, "CIMultiDict", 1) < 0) { - return -1; - } - return 0; -} - -static inline PyObject * -cimultidict_copy(MultiDictObject *self) -{ - return _multidict_copy(self, &cimultidict_type); -} - -PyDoc_STRVAR(cimultidict_copy_doc, -"Return a copy of itself."); - -static PyMethodDef cimultidict_methods[] = { - { - "copy", - (PyCFunction)cimultidict_copy, - METH_NOARGS, - cimultidict_copy_doc - }, - { - NULL, - NULL - } /* sentinel */ -}; - -PyDoc_STRVAR(CIMultDict_doc, -"Dictionary with the support for duplicate case-insensitive keys."); - - -static PyTypeObject cimultidict_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "multidict._multidict.CIMultiDict", /* tp_name */ - sizeof(MultiDictObject), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_tp_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - .tp_doc = CIMultDict_doc, - .tp_traverse = (traverseproc)multidict_tp_traverse, - .tp_clear = (inquiry)multidict_tp_clear, - .tp_weaklistoffset = offsetof(MultiDictObject, weaklist), - .tp_methods = cimultidict_methods, - .tp_base = &multidict_type, - .tp_init = (initproc)cimultidict_tp_init, - .tp_alloc = PyType_GenericAlloc, - .tp_new = PyType_GenericNew, - .tp_free = PyObject_GC_Del, -}; - -/******************** MultiDictProxy ********************/ - -static inline int -multidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args, - PyObject *kwds) -{ - PyObject *arg = NULL; - MultiDictObject *md = NULL; - - if (!PyArg_UnpackTuple(args, "multidict._multidict.MultiDictProxy", - 0, 1, 
&arg)) - { - return -1; - } - if (arg == NULL) { - PyErr_Format( - PyExc_TypeError, - "__init__() missing 1 required positional argument: 'arg'" - ); - return -1; - } - if (!MultiDictProxy_CheckExact(arg) && - !CIMultiDict_CheckExact(arg) && - !MultiDict_CheckExact(arg)) - { - PyErr_Format( - PyExc_TypeError, - "ctor requires MultiDict or MultiDictProxy instance, " - "not ", - Py_TYPE(arg)->tp_name - ); - return -1; - } - - md = (MultiDictObject*)arg; - if (MultiDictProxy_CheckExact(arg)) { - md = ((MultiDictProxyObject*)arg)->md; - } - Py_INCREF(md); - self->md = md; - - return 0; -} - -static inline PyObject * -multidict_proxy_getall(MultiDictProxyObject *self, PyObject *args, - PyObject *kwds) -{ - return multidict_getall(self->md, args, kwds); -} - -static inline PyObject * -multidict_proxy_getone(MultiDictProxyObject *self, PyObject *args, - PyObject *kwds) -{ - return multidict_getone(self->md, args, kwds); -} - -static inline PyObject * -multidict_proxy_get(MultiDictProxyObject *self, PyObject *args, - PyObject *kwds) -{ - return multidict_get(self->md, args, kwds); -} - -static inline PyObject * -multidict_proxy_keys(MultiDictProxyObject *self) -{ - return multidict_keys(self->md); -} - -static inline PyObject * -multidict_proxy_items(MultiDictProxyObject *self) -{ - return multidict_items(self->md); -} - -static inline PyObject * -multidict_proxy_values(MultiDictProxyObject *self) -{ - return multidict_values(self->md); -} - -static inline PyObject * -multidict_proxy_copy(MultiDictProxyObject *self) -{ - return _multidict_proxy_copy(self, &multidict_type); -} - -static inline PyObject * -multidict_proxy_reduce(MultiDictProxyObject *self) -{ - PyErr_Format( - PyExc_TypeError, - "can't pickle %s objects", Py_TYPE(self)->tp_name - ); - - return NULL; -} - -static inline Py_ssize_t -multidict_proxy_mp_len(MultiDictProxyObject *self) -{ - return multidict_mp_len(self->md); -} - -static inline PyObject * -multidict_proxy_mp_subscript(MultiDictProxyObject *self, PyObject *key) -{ - return multidict_mp_subscript(self->md, key); -} - -static inline int -multidict_proxy_sq_contains(MultiDictProxyObject *self, PyObject *key) -{ - return multidict_sq_contains(self->md, key); -} - -static inline PyObject * -multidict_proxy_tp_iter(MultiDictProxyObject *self) -{ - return multidict_tp_iter(self->md); -} - -static inline PyObject * -multidict_proxy_tp_richcompare(MultiDictProxyObject *self, PyObject *other, - int op) -{ - return multidict_tp_richcompare((PyObject*)self->md, other, op); -} - -static inline void -multidict_proxy_tp_dealloc(MultiDictProxyObject *self) -{ - PyObject_GC_UnTrack(self); - if (self->weaklist != NULL) { - PyObject_ClearWeakRefs((PyObject *)self); - }; - Py_XDECREF(self->md); - Py_TYPE(self)->tp_free((PyObject *)self); -} - -static inline int -multidict_proxy_tp_traverse(MultiDictProxyObject *self, visitproc visit, - void *arg) -{ - Py_VISIT(self->md); - return 0; -} - -static inline int -multidict_proxy_tp_clear(MultiDictProxyObject *self) -{ - Py_CLEAR(self->md); - return 0; -} - -static PySequenceMethods multidict_proxy_sequence = { - .sq_contains = (objobjproc)multidict_proxy_sq_contains, -}; - -static PyMappingMethods multidict_proxy_mapping = { - .mp_length = (lenfunc)multidict_proxy_mp_len, - .mp_subscript = (binaryfunc)multidict_proxy_mp_subscript, -}; - -static PyMethodDef multidict_proxy_methods[] = { - { - "getall", - (PyCFunction)multidict_proxy_getall, - METH_VARARGS | METH_KEYWORDS, - multidict_getall_doc - }, - { - "getone", - 
(PyCFunction)multidict_proxy_getone, - METH_VARARGS | METH_KEYWORDS, - multidict_getone_doc - }, - { - "get", - (PyCFunction)multidict_proxy_get, - METH_VARARGS | METH_KEYWORDS, - multidict_get_doc - }, - { - "keys", - (PyCFunction)multidict_proxy_keys, - METH_NOARGS, - multidict_keys_doc - }, - { - "items", - (PyCFunction)multidict_proxy_items, - METH_NOARGS, - multidict_items_doc - }, - { - "values", - (PyCFunction)multidict_proxy_values, - METH_NOARGS, - multidict_values_doc - }, - { - "copy", - (PyCFunction)multidict_proxy_copy, - METH_NOARGS, - multidict_copy_doc - }, - { - "__reduce__", - (PyCFunction)multidict_proxy_reduce, - METH_NOARGS, - NULL - }, - { - "__class_getitem__", - (PyCFunction)multidict_class_getitem, - METH_O | METH_CLASS, - NULL - }, - { - NULL, - NULL - } /* sentinel */ -}; - - -PyDoc_STRVAR(MultDictProxy_doc, -"Read-only proxy for MultiDict instance."); - - -static PyTypeObject multidict_proxy_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "multidict._multidict.MultiDictProxy", /* tp_name */ - sizeof(MultiDictProxyObject), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_proxy_tp_dealloc, - .tp_repr = (reprfunc)multidict_repr, - .tp_as_sequence = &multidict_proxy_sequence, - .tp_as_mapping = &multidict_proxy_mapping, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - .tp_doc = MultDictProxy_doc, - .tp_traverse = (traverseproc)multidict_proxy_tp_traverse, - .tp_clear = (inquiry)multidict_proxy_tp_clear, - .tp_richcompare = (richcmpfunc)multidict_proxy_tp_richcompare, - .tp_weaklistoffset = offsetof(MultiDictProxyObject, weaklist), - .tp_iter = (getiterfunc)multidict_proxy_tp_iter, - .tp_methods = multidict_proxy_methods, - .tp_init = (initproc)multidict_proxy_tp_init, - .tp_alloc = PyType_GenericAlloc, - .tp_new = PyType_GenericNew, - .tp_free = PyObject_GC_Del, -}; - -/******************** CIMultiDictProxy ********************/ - -static inline int -cimultidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args, - PyObject *kwds) -{ - PyObject *arg = NULL; - MultiDictObject *md = NULL; - - if (!PyArg_UnpackTuple(args, "multidict._multidict.CIMultiDictProxy", - 1, 1, &arg)) - { - return -1; - } - if (arg == NULL) { - PyErr_Format( - PyExc_TypeError, - "__init__() missing 1 required positional argument: 'arg'" - ); - return -1; - } - if (!CIMultiDictProxy_CheckExact(arg) && !CIMultiDict_CheckExact(arg)) { - PyErr_Format( - PyExc_TypeError, - "ctor requires CIMultiDict or CIMultiDictProxy instance, " - "not ", - Py_TYPE(arg)->tp_name - ); - return -1; - } - - md = (MultiDictObject*)arg; - if (CIMultiDictProxy_CheckExact(arg)) { - md = ((MultiDictProxyObject*)arg)->md; - } - Py_INCREF(md); - self->md = md; - - return 0; -} - -static inline PyObject * -cimultidict_proxy_copy(MultiDictProxyObject *self) -{ - return _multidict_proxy_copy(self, &cimultidict_type); -} - - -PyDoc_STRVAR(CIMultDictProxy_doc, -"Read-only proxy for CIMultiDict instance."); - -PyDoc_STRVAR(cimultidict_proxy_copy_doc, -"Return copy of itself"); - -static PyMethodDef cimultidict_proxy_methods[] = { - { - "copy", - (PyCFunction)cimultidict_proxy_copy, - METH_NOARGS, - cimultidict_proxy_copy_doc - }, - { - NULL, - NULL - } /* sentinel */ -}; - -static PyTypeObject cimultidict_proxy_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "multidict._multidict.CIMultiDictProxy", /* tp_name */ - sizeof(MultiDictProxyObject), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_proxy_tp_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, 
- .tp_doc = CIMultDictProxy_doc, - .tp_traverse = (traverseproc)multidict_proxy_tp_traverse, - .tp_clear = (inquiry)multidict_proxy_tp_clear, - .tp_richcompare = (richcmpfunc)multidict_proxy_tp_richcompare, - .tp_weaklistoffset = offsetof(MultiDictProxyObject, weaklist), - .tp_methods = cimultidict_proxy_methods, - .tp_base = &multidict_proxy_type, - .tp_init = (initproc)cimultidict_proxy_tp_init, - .tp_alloc = PyType_GenericAlloc, - .tp_new = PyType_GenericNew, - .tp_free = PyObject_GC_Del, -}; - -/******************** Other functions ********************/ - -static inline PyObject * -getversion(PyObject *self, PyObject *md) -{ - pair_list_t *pairs = NULL; - if (MultiDict_CheckExact(md) || CIMultiDict_CheckExact(md)) { - pairs = &((MultiDictObject*)md)->pairs; - } else if (MultiDictProxy_CheckExact(md) || CIMultiDictProxy_CheckExact(md)) { - pairs = &((MultiDictProxyObject*)md)->md->pairs; - } else { - PyErr_Format(PyExc_TypeError, "unexpected type"); - return NULL; - } - return PyLong_FromUnsignedLong(pair_list_version(pairs)); -} - -/******************** Module ********************/ - -static inline void -module_free(void *m) -{ - Py_CLEAR(collections_abc_mapping); - Py_CLEAR(collections_abc_mut_mapping); - Py_CLEAR(collections_abc_mut_multi_mapping); -} - -static PyMethodDef multidict_module_methods[] = { - { - "getversion", - (PyCFunction)getversion, - METH_O - }, - { - NULL, - NULL - } /* sentinel */ -}; - -static PyModuleDef multidict_module = { - PyModuleDef_HEAD_INIT, /* m_base */ - "_multidict", /* m_name */ - .m_size = -1, - .m_methods = multidict_module_methods, - .m_free = (freefunc)module_free, -}; - -PyMODINIT_FUNC -PyInit__multidict() -{ - PyObject *module = NULL, - *reg_func_call_result = NULL; - -#define WITH_MOD(NAME) \ - Py_CLEAR(module); \ - module = PyImport_ImportModule(NAME); \ - if (module == NULL) { \ - goto fail; \ - } - -#define GET_MOD_ATTR(VAR, NAME) \ - VAR = PyObject_GetAttrString(module, NAME); \ - if (VAR == NULL) { \ - goto fail; \ - } - - if (multidict_views_init() < 0) { - goto fail; - } - - if (multidict_iter_init() < 0) { - goto fail; - } - - if (istr_init() < 0) { - goto fail; - } - - if (PyType_Ready(&multidict_type) < 0 || - PyType_Ready(&cimultidict_type) < 0 || - PyType_Ready(&multidict_proxy_type) < 0 || - PyType_Ready(&cimultidict_proxy_type) < 0) - { - goto fail; - } - - WITH_MOD("collections.abc"); - GET_MOD_ATTR(collections_abc_mapping, "Mapping"); - - WITH_MOD("multidict._abc"); - GET_MOD_ATTR(collections_abc_mut_mapping, "MultiMapping"); - - WITH_MOD("multidict._abc"); - GET_MOD_ATTR(collections_abc_mut_multi_mapping, "MutableMultiMapping"); - - WITH_MOD("multidict._multidict_base"); - GET_MOD_ATTR(repr_func, "_mdrepr"); - - /* Register in _abc mappings (CI)MultiDict and (CI)MultiDictProxy */ - reg_func_call_result = PyObject_CallMethod( - collections_abc_mut_mapping, - "register", "O", - (PyObject*)&multidict_proxy_type - ); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - reg_func_call_result = PyObject_CallMethod( - collections_abc_mut_mapping, - "register", "O", - (PyObject*)&cimultidict_proxy_type - ); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - reg_func_call_result = PyObject_CallMethod( - collections_abc_mut_multi_mapping, - "register", "O", - (PyObject*)&multidict_type - ); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - reg_func_call_result = PyObject_CallMethod( - 
collections_abc_mut_multi_mapping, - "register", "O", - (PyObject*)&cimultidict_type - ); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - /* Instantiate this module */ - module = PyModule_Create(&multidict_module); - - Py_INCREF(&istr_type); - if (PyModule_AddObject( - module, "istr", (PyObject*)&istr_type) < 0) - { - goto fail; - } - - Py_INCREF(&multidict_type); - if (PyModule_AddObject( - module, "MultiDict", (PyObject*)&multidict_type) < 0) - { - goto fail; - } - - Py_INCREF(&cimultidict_type); - if (PyModule_AddObject( - module, "CIMultiDict", (PyObject*)&cimultidict_type) < 0) - { - goto fail; - } - - Py_INCREF(&multidict_proxy_type); - if (PyModule_AddObject( - module, "MultiDictProxy", (PyObject*)&multidict_proxy_type) < 0) - { - goto fail; - } - - Py_INCREF(&cimultidict_proxy_type); - if (PyModule_AddObject( - module, "CIMultiDictProxy", (PyObject*)&cimultidict_proxy_type) < 0) - { - goto fail; - } - - return module; - -fail: - Py_XDECREF(collections_abc_mapping); - Py_XDECREF(collections_abc_mut_mapping); - Py_XDECREF(collections_abc_mut_multi_mapping); - - return NULL; - -#undef WITH_MOD -#undef GET_MOD_ATTR -} diff --git a/third_party/python/multidict/multidict/_multidict_base.py b/third_party/python/multidict/multidict/_multidict_base.py deleted file mode 100644 index 394466548cb2..000000000000 --- a/third_party/python/multidict/multidict/_multidict_base.py +++ /dev/null @@ -1,144 +0,0 @@ -from collections.abc import ItemsView, Iterable, KeysView, Set, ValuesView - - -def _abc_itemsview_register(view_cls): - ItemsView.register(view_cls) - - -def _abc_keysview_register(view_cls): - KeysView.register(view_cls) - - -def _abc_valuesview_register(view_cls): - ValuesView.register(view_cls) - - -def _viewbaseset_richcmp(view, other, op): - if op == 0: # < - if not isinstance(other, Set): - return NotImplemented - return len(view) < len(other) and view <= other - elif op == 1: # <= - if not isinstance(other, Set): - return NotImplemented - if len(view) > len(other): - return False - for elem in view: - if elem not in other: - return False - return True - elif op == 2: # == - if not isinstance(other, Set): - return NotImplemented - return len(view) == len(other) and view <= other - elif op == 3: # != - return not view == other - elif op == 4: # > - if not isinstance(other, Set): - return NotImplemented - return len(view) > len(other) and view >= other - elif op == 5: # >= - if not isinstance(other, Set): - return NotImplemented - if len(view) < len(other): - return False - for elem in other: - if elem not in view: - return False - return True - - -def _viewbaseset_and(view, other): - if not isinstance(other, Iterable): - return NotImplemented - if isinstance(view, Set): - view = set(iter(view)) - if isinstance(other, Set): - other = set(iter(other)) - if not isinstance(other, Set): - other = set(iter(other)) - return view & other - - -def _viewbaseset_or(view, other): - if not isinstance(other, Iterable): - return NotImplemented - if isinstance(view, Set): - view = set(iter(view)) - if isinstance(other, Set): - other = set(iter(other)) - if not isinstance(other, Set): - other = set(iter(other)) - return view | other - - -def _viewbaseset_sub(view, other): - if not isinstance(other, Iterable): - return NotImplemented - if isinstance(view, Set): - view = set(iter(view)) - if isinstance(other, Set): - other = set(iter(other)) - if not isinstance(other, Set): - other = set(iter(other)) - return view - other - - -def _viewbaseset_xor(view, 
other): - if not isinstance(other, Iterable): - return NotImplemented - if isinstance(view, Set): - view = set(iter(view)) - if isinstance(other, Set): - other = set(iter(other)) - if not isinstance(other, Set): - other = set(iter(other)) - return view ^ other - - -def _itemsview_isdisjoint(view, other): - "Return True if two sets have a null intersection." - for v in other: - if v in view: - return False - return True - - -def _itemsview_repr(view): - lst = [] - for k, v in view: - lst.append("{!r}: {!r}".format(k, v)) - body = ", ".join(lst) - return "{}({})".format(view.__class__.__name__, body) - - -def _keysview_isdisjoint(view, other): - "Return True if two sets have a null intersection." - for k in other: - if k in view: - return False - return True - - -def _keysview_repr(view): - lst = [] - for k in view: - lst.append("{!r}".format(k)) - body = ", ".join(lst) - return "{}({})".format(view.__class__.__name__, body) - - -def _valuesview_repr(view): - lst = [] - for v in view: - lst.append("{!r}".format(v)) - body = ", ".join(lst) - return "{}({})".format(view.__class__.__name__, body) - - -def _mdrepr(md): - lst = [] - for k, v in md.items(): - lst.append("'{}': {!r}".format(k, v)) - body = ", ".join(lst) - return "<{}({})>".format(md.__class__.__name__, body) diff --git a/third_party/python/multidict/multidict/_multidict_py.py b/third_party/python/multidict/multidict/_multidict_py.py deleted file mode 100644 index 1ec63da0d558..000000000000 --- a/third_party/python/multidict/multidict/_multidict_py.py +++ /dev/null @@ -1,515 +0,0 @@ -import sys -from array import array -from collections import abc - -from ._abc import MultiMapping, MutableMultiMapping - -_marker = object() - - -class istr(str): - - """Case insensitive str.""" - - __is_istr__ = True - - -upstr = istr # for relaxing backward compatibility problems - - -def getversion(md): - if not isinstance(md, _Base): - raise TypeError("Parameter should be multidict or proxy") - return md._impl._version - - -_version = array("Q", [0]) - - -class _Impl: - __slots__ = ("_items", "_version") - - def __init__(self): - self._items = [] - self.incr_version() - - def incr_version(self): - global _version - v = _version - v[0] += 1 - self._version = v[0] - - if sys.implementation.name != "pypy": - - def __sizeof__(self): - return object.__sizeof__(self) + sys.getsizeof(self._items) - - -class _Base: - def _title(self, key): - return key - - def getall(self, key, default=_marker): - """Return a list of all values matching the key.""" - identity = self._title(key) - res = [v for i, k, v in self._impl._items if i == identity] - if res: - return res - if not res and default is not _marker: - return default - raise KeyError("Key not found: %r" % key) - - def getone(self, key, default=_marker): - """Get first value matching the key.""" - identity = self._title(key) - for i, k, v in self._impl._items: - if i == identity: - return v - if default is not _marker: - return default - raise KeyError("Key not found: %r" % key) - - # Mapping interface # - - def __getitem__(self, key): - return self.getone(key) - - def get(self, key, default=None): - """Get first value matching the key. - - The method is alias for .getone(). 
- """ - return self.getone(key, default) - - def __iter__(self): - return iter(self.keys()) - - def __len__(self): - return len(self._impl._items) - - def keys(self): - """Return a new view of the dictionary's keys.""" - return _KeysView(self._impl) - - def items(self): - """Return a new view of the dictionary's items *(key, value) pairs).""" - return _ItemsView(self._impl) - - def values(self): - """Return a new view of the dictionary's values.""" - return _ValuesView(self._impl) - - def __eq__(self, other): - if not isinstance(other, abc.Mapping): - return NotImplemented - if isinstance(other, _Base): - lft = self._impl._items - rht = other._impl._items - if len(lft) != len(rht): - return False - for (i1, k2, v1), (i2, k2, v2) in zip(lft, rht): - if i1 != i2 or v1 != v2: - return False - return True - if len(self._impl._items) != len(other): - return False - for k, v in self.items(): - nv = other.get(k, _marker) - if v != nv: - return False - return True - - def __contains__(self, key): - identity = self._title(key) - for i, k, v in self._impl._items: - if i == identity: - return True - return False - - def __repr__(self): - body = ", ".join("'{}': {!r}".format(k, v) for k, v in self.items()) - return "<{}({})>".format(self.__class__.__name__, body) - - -class MultiDictProxy(_Base, MultiMapping): - """Read-only proxy for MultiDict instance.""" - - def __init__(self, arg): - if not isinstance(arg, (MultiDict, MultiDictProxy)): - raise TypeError( - "ctor requires MultiDict or MultiDictProxy instance" - ", not {}".format(type(arg)) - ) - - self._impl = arg._impl - - def __reduce__(self): - raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) - - def copy(self): - """Return a copy of itself.""" - return MultiDict(self.items()) - - -class CIMultiDictProxy(MultiDictProxy): - """Read-only proxy for CIMultiDict instance.""" - - def __init__(self, arg): - if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)): - raise TypeError( - "ctor requires CIMultiDict or CIMultiDictProxy instance" - ", not {}".format(type(arg)) - ) - - self._impl = arg._impl - - def _title(self, key): - return key.title() - - def copy(self): - """Return a copy of itself.""" - return CIMultiDict(self.items()) - - -class MultiDict(_Base, MutableMultiMapping): - """Dictionary with the support for duplicate keys.""" - - def __init__(self, *args, **kwargs): - self._impl = _Impl() - - self._extend(args, kwargs, self.__class__.__name__, self._extend_items) - - if sys.implementation.name != "pypy": - - def __sizeof__(self): - return object.__sizeof__(self) + sys.getsizeof(self._impl) - - def __reduce__(self): - return (self.__class__, (list(self.items()),)) - - def _title(self, key): - return key - - def _key(self, key): - if isinstance(key, str): - return key - else: - raise TypeError( - "MultiDict keys should be either str " "or subclasses of str" - ) - - def add(self, key, value): - identity = self._title(key) - self._impl._items.append((identity, self._key(key), value)) - self._impl.incr_version() - - def copy(self): - """Return a copy of itself.""" - cls = self.__class__ - return cls(self.items()) - - __copy__ = copy - - def extend(self, *args, **kwargs): - """Extend current MultiDict with more values. - - This method must be used instead of update. 
- """ - self._extend(args, kwargs, "extend", self._extend_items) - - def _extend(self, args, kwargs, name, method): - if len(args) > 1: - raise TypeError( - "{} takes at most 1 positional argument" - " ({} given)".format(name, len(args)) - ) - if args: - arg = args[0] - if isinstance(args[0], (MultiDict, MultiDictProxy)) and not kwargs: - items = arg._impl._items - else: - if hasattr(arg, "items"): - arg = arg.items() - if kwargs: - arg = list(arg) - arg.extend(list(kwargs.items())) - items = [] - for item in arg: - if not len(item) == 2: - raise TypeError( - "{} takes either dict or list of (key, value) " - "tuples".format(name) - ) - items.append((self._title(item[0]), self._key(item[0]), item[1])) - - method(items) - else: - method( - [ - (self._title(key), self._key(key), value) - for key, value in kwargs.items() - ] - ) - - def _extend_items(self, items): - for identity, key, value in items: - self.add(key, value) - - def clear(self): - """Remove all items from MultiDict.""" - self._impl._items.clear() - self._impl.incr_version() - - # Mapping interface # - - def __setitem__(self, key, value): - self._replace(key, value) - - def __delitem__(self, key): - identity = self._title(key) - items = self._impl._items - found = False - for i in range(len(items) - 1, -1, -1): - if items[i][0] == identity: - del items[i] - found = True - if not found: - raise KeyError(key) - else: - self._impl.incr_version() - - def setdefault(self, key, default=None): - """Return value for key, set value to default if key is not present.""" - identity = self._title(key) - for i, k, v in self._impl._items: - if i == identity: - return v - self.add(key, default) - return default - - def popone(self, key, default=_marker): - """Remove specified key and return the corresponding value. - - If key is not found, d is returned if given, otherwise - KeyError is raised. - - """ - identity = self._title(key) - for i in range(len(self._impl._items)): - if self._impl._items[i][0] == identity: - value = self._impl._items[i][2] - del self._impl._items[i] - self._impl.incr_version() - return value - if default is _marker: - raise KeyError(key) - else: - return default - - pop = popone # type: ignore - - def popall(self, key, default=_marker): - """Remove all occurrences of key and return the list of corresponding - values. - - If key is not found, default is returned if given, otherwise - KeyError is raised. 
- - """ - found = False - identity = self._title(key) - ret = [] - for i in range(len(self._impl._items) - 1, -1, -1): - item = self._impl._items[i] - if item[0] == identity: - ret.append(item[2]) - del self._impl._items[i] - self._impl.incr_version() - found = True - if not found: - if default is _marker: - raise KeyError(key) - else: - return default - else: - ret.reverse() - return ret - - def popitem(self): - """Remove and return an arbitrary (key, value) pair.""" - if self._impl._items: - i = self._impl._items.pop(0) - self._impl.incr_version() - return i[1], i[2] - else: - raise KeyError("empty multidict") - - def update(self, *args, **kwargs): - """Update the dictionary from *other*, overwriting existing keys.""" - self._extend(args, kwargs, "update", self._update_items) - - def _update_items(self, items): - if not items: - return - used_keys = {} - for identity, key, value in items: - start = used_keys.get(identity, 0) - for i in range(start, len(self._impl._items)): - item = self._impl._items[i] - if item[0] == identity: - used_keys[identity] = i + 1 - self._impl._items[i] = (identity, key, value) - break - else: - self._impl._items.append((identity, key, value)) - used_keys[identity] = len(self._impl._items) - - # drop tails - i = 0 - while i < len(self._impl._items): - item = self._impl._items[i] - identity = item[0] - pos = used_keys.get(identity) - if pos is None: - i += 1 - continue - if i >= pos: - del self._impl._items[i] - else: - i += 1 - - self._impl.incr_version() - - def _replace(self, key, value): - key = self._key(key) - identity = self._title(key) - items = self._impl._items - - for i in range(len(items)): - item = items[i] - if item[0] == identity: - items[i] = (identity, key, value) - # i points to last found item - rgt = i - self._impl.incr_version() - break - else: - self._impl._items.append((identity, key, value)) - self._impl.incr_version() - return - - # remove all tail items - i = rgt + 1 - while i < len(items): - item = items[i] - if item[0] == identity: - del items[i] - else: - i += 1 - - -class CIMultiDict(MultiDict): - """Dictionary with the support for duplicate case-insensitive keys.""" - - def _title(self, key): - return key.title() - - -class _Iter: - __slots__ = ("_size", "_iter") - - def __init__(self, size, iterator): - self._size = size - self._iter = iterator - - def __iter__(self): - return self - - def __next__(self): - return next(self._iter) - - def __length_hint__(self): - return self._size - - -class _ViewBase: - def __init__(self, impl): - self._impl = impl - self._version = impl._version - - def __len__(self): - return len(self._impl._items) - - -class _ItemsView(_ViewBase, abc.ItemsView): - def __contains__(self, item): - assert isinstance(item, tuple) or isinstance(item, list) - assert len(item) == 2 - for i, k, v in self._impl._items: - if item[0] == k and item[1] == v: - return True - return False - - def __iter__(self): - return _Iter(len(self), self._iter()) - - def _iter(self): - for i, k, v in self._impl._items: - if self._version != self._impl._version: - raise RuntimeError("Dictionary changed during iteration") - yield k, v - - def __repr__(self): - lst = [] - for item in self._impl._items: - lst.append("{!r}: {!r}".format(item[1], item[2])) - body = ", ".join(lst) - return "{}({})".format(self.__class__.__name__, body) - - -class _ValuesView(_ViewBase, abc.ValuesView): - def __contains__(self, value): - for item in self._impl._items: - if item[2] == value: - return True - return False - - def __iter__(self): - return 
_Iter(len(self), self._iter()) - - def _iter(self): - for item in self._impl._items: - if self._version != self._impl._version: - raise RuntimeError("Dictionary changed during iteration") - yield item[2] - - def __repr__(self): - lst = [] - for item in self._impl._items: - lst.append("{!r}".format(item[2])) - body = ", ".join(lst) - return "{}({})".format(self.__class__.__name__, body) - - -class _KeysView(_ViewBase, abc.KeysView): - def __contains__(self, key): - for item in self._impl._items: - if item[1] == key: - return True - return False - - def __iter__(self): - return _Iter(len(self), self._iter()) - - def _iter(self): - for item in self._impl._items: - if self._version != self._impl._version: - raise RuntimeError("Dictionary changed during iteration") - yield item[1] - - def __repr__(self): - lst = [] - for item in self._impl._items: - lst.append("{!r}".format(item[1])) - body = ", ".join(lst) - return "{}({})".format(self.__class__.__name__, body) diff --git a/third_party/python/multidict/multidict/_multilib/defs.h b/third_party/python/multidict/multidict/_multilib/defs.h deleted file mode 100644 index c7027c817e68..000000000000 --- a/third_party/python/multidict/multidict/_multilib/defs.h +++ /dev/null @@ -1,22 +0,0 @@ -#ifndef _MULTIDICT_DEFS_H -#define _MULTIDICT_DEFS_H - -#ifdef __cplusplus -extern "C" { -#endif - -_Py_IDENTIFIER(lower); - -/* We link this module statically for convenience. If compiled as a shared - library instead, some compilers don't allow addresses of Python objects - defined in other libraries to be used in static initializers here. The - DEFERRED_ADDRESS macro is used to tag the slots where such addresses - appear; the module init function must fill in the tagged slots at runtime. - The argument is for documentation -- the macro ignores it. 
-*/ -#define DEFERRED_ADDRESS(ADDR) 0 - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/multidict/multidict/_multilib/dict.h b/third_party/python/multidict/multidict/_multilib/dict.h deleted file mode 100644 index 3caf83e5b4c0..000000000000 --- a/third_party/python/multidict/multidict/_multilib/dict.h +++ /dev/null @@ -1,24 +0,0 @@ -#ifndef _MULTIDICT_C_H -#define _MULTIDICT_C_H - -#ifdef __cplusplus -extern "C" { -#endif - -typedef struct { // 16 or 24 for GC prefix - PyObject_HEAD // 16 - PyObject *weaklist; - pair_list_t pairs; -} MultiDictObject; - -typedef struct { - PyObject_HEAD - PyObject *weaklist; - MultiDictObject *md; -} MultiDictProxyObject; - -#ifdef __cplusplus -} -#endif - -#endif diff --git a/third_party/python/multidict/multidict/_multilib/istr.h b/third_party/python/multidict/multidict/_multilib/istr.h deleted file mode 100644 index 2688f48914a9..000000000000 --- a/third_party/python/multidict/multidict/_multilib/istr.h +++ /dev/null @@ -1,85 +0,0 @@ -#ifndef _MULTIDICT_ISTR_H -#define _MULTIDICT_ISTR_H - -#ifdef __cplusplus -extern "C" { -#endif - -typedef struct { - PyUnicodeObject str; - PyObject * canonical; -} istrobject; - -PyDoc_STRVAR(istr__doc__, "istr class implementation"); - -static PyTypeObject istr_type; - -static inline void -istr_dealloc(istrobject *self) -{ - Py_XDECREF(self->canonical); - PyUnicode_Type.tp_dealloc((PyObject*)self); -} - -static inline PyObject * -istr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - PyObject *x = NULL; - static char *kwlist[] = {"object", "encoding", "errors", 0}; - PyObject *encoding = NULL; - PyObject *errors = NULL; - PyObject *s = NULL; - PyObject * ret = NULL; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO:str", - kwlist, &x, &encoding, &errors)) { - return NULL; - } - if (x != NULL && Py_TYPE(x) == &istr_type) { - Py_INCREF(x); - return x; - } - ret = PyUnicode_Type.tp_new(type, args, kwds); - if (!ret) { - goto fail; - } - s =_PyObject_CallMethodId(ret, &PyId_lower, NULL); - if (!s) { - goto fail; - } - ((istrobject*)ret)->canonical = s; - s = NULL; /* the reference is stollen by .canonical */ - return ret; -fail: - Py_XDECREF(ret); - return NULL; -} - -static PyTypeObject istr_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict.istr", - sizeof(istrobject), - .tp_dealloc = (destructor)istr_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT - | Py_TPFLAGS_BASETYPE - | Py_TPFLAGS_UNICODE_SUBCLASS, - .tp_doc = istr__doc__, - .tp_base = DEFERRED_ADDRESS(&PyUnicode_Type), - .tp_new = (newfunc)istr_new, -}; - - -static inline int -istr_init(void) -{ - istr_type.tp_base = &PyUnicode_Type; - if (PyType_Ready(&istr_type) < 0) { - return -1; - } - return 0; -} - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/multidict/multidict/_multilib/iter.h b/third_party/python/multidict/multidict/_multilib/iter.h deleted file mode 100644 index 4e2e32b3875f..000000000000 --- a/third_party/python/multidict/multidict/_multilib/iter.h +++ /dev/null @@ -1,238 +0,0 @@ -#ifndef _MULTIDICT_ITER_H -#define _MULTIDICT_ITER_H - -#ifdef __cplusplus -extern "C" { -#endif - -static PyTypeObject multidict_items_iter_type; -static PyTypeObject multidict_values_iter_type; -static PyTypeObject multidict_keys_iter_type; - -typedef struct multidict_iter { - PyObject_HEAD - MultiDictObject *md; // MultiDict or CIMultiDict - Py_ssize_t current; - uint64_t version; -} MultidictIter; - -static inline void -_init_iter(MultidictIter *it, MultiDictObject *md) -{ - 
Py_INCREF(md); - - it->md = md; - it->current = 0; - it->version = pair_list_version(&md->pairs); -} - -static inline PyObject * -multidict_items_iter_new(MultiDictObject *md) -{ - MultidictIter *it = PyObject_GC_New( - MultidictIter, &multidict_items_iter_type); - if (it == NULL) { - return NULL; - } - - _init_iter(it, md); - - PyObject_GC_Track(it); - return (PyObject *)it; -} - -static inline PyObject * -multidict_keys_iter_new(MultiDictObject *md) -{ - MultidictIter *it = PyObject_GC_New( - MultidictIter, &multidict_keys_iter_type); - if (it == NULL) { - return NULL; - } - - _init_iter(it, md); - - PyObject_GC_Track(it); - return (PyObject *)it; -} - -static inline PyObject * -multidict_values_iter_new(MultiDictObject *md) -{ - MultidictIter *it = PyObject_GC_New( - MultidictIter, &multidict_values_iter_type); - if (it == NULL) { - return NULL; - } - - _init_iter(it, md); - - PyObject_GC_Track(it); - return (PyObject *)it; -} - -static inline PyObject * -multidict_items_iter_iternext(MultidictIter *self) -{ - PyObject *key = NULL; - PyObject *value = NULL; - PyObject *ret = NULL; - - if (self->version != pair_list_version(&self->md->pairs)) { - PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration"); - return NULL; - } - - if (!_pair_list_next(&self->md->pairs, &self->current, NULL, &key, &value, NULL)) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - - ret = PyTuple_Pack(2, key, value); - if (ret == NULL) { - return NULL; - } - - return ret; -} - -static inline PyObject * -multidict_values_iter_iternext(MultidictIter *self) -{ - PyObject *value = NULL; - - if (self->version != pair_list_version(&self->md->pairs)) { - PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration"); - return NULL; - } - - if (!pair_list_next(&self->md->pairs, &self->current, NULL, NULL, &value)) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - - Py_INCREF(value); - - return value; -} - -static inline PyObject * -multidict_keys_iter_iternext(MultidictIter *self) -{ - PyObject *key = NULL; - - if (self->version != pair_list_version(&self->md->pairs)) { - PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration"); - return NULL; - } - - if (!pair_list_next(&self->md->pairs, &self->current, NULL, &key, NULL)) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - - Py_INCREF(key); - - return key; -} - -static inline void -multidict_iter_dealloc(MultidictIter *self) -{ - PyObject_GC_UnTrack(self); - Py_XDECREF(self->md); - PyObject_GC_Del(self); -} - -static inline int -multidict_iter_traverse(MultidictIter *self, visitproc visit, void *arg) -{ - Py_VISIT(self->md); - return 0; -} - -static inline int -multidict_iter_clear(MultidictIter *self) -{ - Py_CLEAR(self->md); - return 0; -} - -static inline PyObject * -multidict_iter_len(MultidictIter *self) -{ - return PyLong_FromLong(pair_list_len(&self->md->pairs)); -} - -PyDoc_STRVAR(length_hint_doc, - "Private method returning an estimate of len(list(it))."); - -static PyMethodDef multidict_iter_methods[] = { - { - "__length_hint__", - (PyCFunction)(void(*)(void))multidict_iter_len, - METH_NOARGS, - length_hint_doc - }, - { - NULL, - NULL - } /* sentinel */ -}; - -/***********************************************************************/ - -static PyTypeObject multidict_items_iter_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict._itemsiter", /* tp_name */ - sizeof(MultidictIter), /* tp_basicsize */ - .tp_dealloc = 
(destructor)multidict_iter_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)multidict_iter_traverse, - .tp_clear = (inquiry)multidict_iter_clear, - .tp_iter = PyObject_SelfIter, - .tp_iternext = (iternextfunc)multidict_items_iter_iternext, - .tp_methods = multidict_iter_methods, -}; - -static PyTypeObject multidict_values_iter_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict._valuesiter", /* tp_name */ - sizeof(MultidictIter), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_iter_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)multidict_iter_traverse, - .tp_clear = (inquiry)multidict_iter_clear, - .tp_iter = PyObject_SelfIter, - .tp_iternext = (iternextfunc)multidict_values_iter_iternext, - .tp_methods = multidict_iter_methods, -}; - -static PyTypeObject multidict_keys_iter_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict._keysiter", /* tp_name */ - sizeof(MultidictIter), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_iter_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)multidict_iter_traverse, - .tp_clear = (inquiry)multidict_iter_clear, - .tp_iter = PyObject_SelfIter, - .tp_iternext = (iternextfunc)multidict_keys_iter_iternext, - .tp_methods = multidict_iter_methods, -}; - -static inline int -multidict_iter_init() -{ - if (PyType_Ready(&multidict_items_iter_type) < 0 || - PyType_Ready(&multidict_values_iter_type) < 0 || - PyType_Ready(&multidict_keys_iter_type) < 0) { - return -1; - } - return 0; -} - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/multidict/multidict/_multilib/pair_list.h b/third_party/python/multidict/multidict/_multilib/pair_list.h deleted file mode 100644 index 7eafd215b538..000000000000 --- a/third_party/python/multidict/multidict/_multilib/pair_list.h +++ /dev/null @@ -1,1244 +0,0 @@ -#ifndef _MULTIDICT_PAIR_LIST_H -#define _MULTIDICT_PAIR_LIST_H - -#ifdef __cplusplus -extern "C" { -#endif - -#include -#include -#include - -typedef PyObject * (*calc_identity_func)(PyObject *key); - -typedef struct pair { - PyObject *identity; // 8 - PyObject *key; // 8 - PyObject *value; // 8 - Py_hash_t hash; // 8 -} pair_t; - -/* Note about the structure size -With 29 pairs the MultiDict object size is slightly less than 1KiB -(1000-1008 bytes depending on Python version, -plus extra 12 bytes for memory allocator internal structures). -As the result the max reserved size is 1020 bytes at most. - -To fit into 512 bytes, the structure can contain only 13 pairs -which is too small, e.g. https://www.python.org returns 16 headers -(9 of them are caching proxy information though). - -The embedded buffer intention is to fit the vast majority of possible -HTTP headers into the buffer without allocating an extra memory block. -*/ - -#if (PY_VERSION_HEX < 0x03080000) -#define EMBEDDED_CAPACITY 28 -#else -#define EMBEDDED_CAPACITY 29 -#endif - -typedef struct pair_list { // 40 - Py_ssize_t capacity; // 8 - Py_ssize_t size; // 8 - uint64_t version; // 8 - calc_identity_func calc_identity; // 8 - pair_t *pairs; // 8 - pair_t buffer[EMBEDDED_CAPACITY]; -} pair_list_t; - -#define MIN_CAPACITY 63 -#define CAPACITY_STEP 64 - -/* Global counter used to set ma_version_tag field of dictionary. - * It is incremented each time that a dictionary is created and each - * time that a dictionary is modified. 
*/ -static uint64_t pair_list_global_version = 0; - -#define NEXT_VERSION() (++pair_list_global_version) - - -static inline int -str_cmp(PyObject *s1, PyObject *s2) -{ - PyObject *ret = PyUnicode_RichCompare(s1, s2, Py_EQ); - if (ret == Py_True) { - Py_DECREF(ret); - return 1; - } - else if (ret == NULL) { - return -1; - } - else { - Py_DECREF(ret); - return 0; - } -} - - -static inline PyObject * -key_to_str(PyObject *key) -{ - PyObject *ret; - PyTypeObject *type = Py_TYPE(key); - if (type == &istr_type) { - ret = ((istrobject*)key)->canonical; - Py_INCREF(ret); - return ret; - } - if (PyUnicode_CheckExact(key)) { - Py_INCREF(key); - return key; - } - if (PyUnicode_Check(key)) { - return PyObject_Str(key); - } - PyErr_SetString(PyExc_TypeError, - "MultiDict keys should be either str " - "or subclasses of str"); - return NULL; -} - - -static inline PyObject * -ci_key_to_str(PyObject *key) -{ - PyObject *ret; - PyTypeObject *type = Py_TYPE(key); - if (type == &istr_type) { - ret = ((istrobject*)key)->canonical; - Py_INCREF(ret); - return ret; - } - if (PyUnicode_Check(key)) { - return _PyObject_CallMethodId(key, &PyId_lower, NULL); - } - PyErr_SetString(PyExc_TypeError, - "CIMultiDict keys should be either str " - "or subclasses of str"); - return NULL; -} - -static inline pair_t * -pair_list_get(pair_list_t *list, Py_ssize_t i) -{ - pair_t *item = list->pairs + i; - return item; -} - - -static inline int -pair_list_grow(pair_list_t *list) -{ - // Grow by one element if needed - Py_ssize_t new_capacity; - pair_t *new_pairs; - - if (list->size < list->capacity) { - return 0; - } - - if (list->pairs == list->buffer) { - new_pairs = PyMem_New(pair_t, MIN_CAPACITY); - memcpy(new_pairs, list->buffer, (size_t)list->capacity * sizeof(pair_t)); - - list->pairs = new_pairs; - list->capacity = MIN_CAPACITY; - return 0; - } else { - new_capacity = list->capacity + CAPACITY_STEP; - new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity); - - if (NULL == new_pairs) { - // Resizing error - return -1; - } - - list->pairs = new_pairs; - list->capacity = new_capacity; - return 0; - } -} - - -static inline int -pair_list_shrink(pair_list_t *list) -{ - // Shrink by one element if needed. - // Optimization is applied to prevent jitter - // (grow-shrink-grow-shrink on adding-removing the single element - // when the buffer is full). - // To prevent this, the buffer is resized if the size is less than the capacity - // by 2*CAPACITY_STEP factor. - // The switch back to embedded buffer is never performed for both reasons: - // the code simplicity and the jitter prevention. 
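Taken together with pair_list_grow above, the resize policy is: pairs start in the embedded buffer (EMBEDDED_CAPACITY, 28 or 29 entries, sized so a typical HTTP header set fits without a heap allocation), spill into a heap block of MIN_CAPACITY (63) on first overflow, then grow by CAPACITY_STEP (64); shrinking only happens once capacity exceeds size by at least 2*CAPACITY_STEP, and never drops below MIN_CAPACITY or back to the embedded buffer. A simplified model of that policy, with the constants taken from this header (the function names below are illustrative, not multidict API):

    EMBEDDED_CAPACITY = 29   # 28 on CPython < 3.8, per the #if above
    MIN_CAPACITY = 63
    CAPACITY_STEP = 64

    def capacity_after_grow(size, capacity):
        # pair_list_grow: act only when the buffer is full
        if size < capacity:
            return capacity
        if capacity == EMBEDDED_CAPACITY:    # first spill out of the embedded buffer
            return MIN_CAPACITY
        return capacity + CAPACITY_STEP

    def capacity_after_shrink(size, capacity):
        # pair_list_shrink: require 2*CAPACITY_STEP of slack and never go
        # below MIN_CAPACITY (so never back into the embedded buffer)
        if capacity - size < 2 * CAPACITY_STEP:
            return capacity
        if capacity - CAPACITY_STEP < MIN_CAPACITY:
            return capacity
        return capacity - CAPACITY_STEP

    assert capacity_after_grow(29, 29) == 63
    assert capacity_after_grow(63, 63) == 127
    assert capacity_after_shrink(63, 191) == 127   # 128 entries of slack: release one step
    assert capacity_after_shrink(64, 191) == 191   # only 127 of slack: keep the buffer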
- - pair_t *new_pairs; - Py_ssize_t new_capacity; - - if (list->capacity - list->size < 2 * CAPACITY_STEP) { - return 0; - } - new_capacity = list->capacity - CAPACITY_STEP; - if (new_capacity < MIN_CAPACITY) { - return 0; - } - - new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity); - - if (NULL == new_pairs) { - // Resizing error - return -1; - } - - list->pairs = new_pairs; - list->capacity = new_capacity; - - return 0; -} - - -static inline int -_pair_list_init(pair_list_t *list, calc_identity_func calc_identity) -{ - list->pairs = list->buffer; - list->capacity = EMBEDDED_CAPACITY; - list->size = 0; - list->version = NEXT_VERSION(); - list->calc_identity = calc_identity; - return 0; -} - -static inline int -pair_list_init(pair_list_t *list) -{ - return _pair_list_init(list, key_to_str); -} - - -static inline int -ci_pair_list_init(pair_list_t *list) -{ - return _pair_list_init(list, ci_key_to_str); -} - - -static inline void -pair_list_dealloc(pair_list_t *list) -{ - pair_t *pair; - Py_ssize_t pos; - - for (pos = 0; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - - Py_XDECREF(pair->identity); - Py_XDECREF(pair->key); - Py_XDECREF(pair->value); - } - - /* - Strictly speaking, resetting size and capacity and - assigning pairs to buffer is not necessary. - Do it to consistency and idemotency. - The cleanup doesn't hurt performance. - !!! - !!! The buffer deletion is crucial though. - !!! - */ - list->size = 0; - if (list->pairs != list->buffer) { - PyMem_Del(list->pairs); - list->pairs = list->buffer; - list->capacity = EMBEDDED_CAPACITY; - } -} - - -static inline Py_ssize_t -pair_list_len(pair_list_t *list) -{ - return list->size; -} - - -static inline int -_pair_list_add_with_hash(pair_list_t *list, - PyObject *identity, - PyObject *key, - PyObject *value, - Py_hash_t hash) -{ - pair_t *pair; - - if (pair_list_grow(list) < 0) { - return -1; - } - - pair = pair_list_get(list, list->size); - - Py_INCREF(identity); - pair->identity = identity; - - Py_INCREF(key); - pair->key = key; - - Py_INCREF(value); - pair->value = value; - - pair->hash = hash; - - list->version = NEXT_VERSION(); - list->size += 1; - - return 0; -} - - -static inline int -pair_list_add(pair_list_t *list, - PyObject *key, - PyObject *value) -{ - Py_hash_t hash; - PyObject *identity = NULL; - int ret; - - identity = list->calc_identity(key); - if (identity == NULL) { - goto fail; - } - hash = PyObject_Hash(identity); - if (hash == -1) { - goto fail; - } - ret = _pair_list_add_with_hash(list, identity, key, value, hash); - Py_DECREF(identity); - return ret; -fail: - Py_XDECREF(identity); - return -1; -} - - -static inline int -pair_list_del_at(pair_list_t *list, Py_ssize_t pos) -{ - // return 1 on success, -1 on failure - Py_ssize_t tail; - pair_t *pair; - - pair = pair_list_get(list, pos); - Py_DECREF(pair->identity); - Py_DECREF(pair->key); - Py_DECREF(pair->value); - - list->size -= 1; - list->version = NEXT_VERSION(); - - if (list->size == pos) { - // remove from tail, no need to shift body - return 0; - } - - tail = list->size - pos; - // TODO: raise an error if tail < 0 - memmove((void *)pair_list_get(list, pos), - (void *)pair_list_get(list, pos + 1), - sizeof(pair_t) * (size_t)tail); - - return pair_list_shrink(list); -} - - -static inline int -_pair_list_drop_tail(pair_list_t *list, PyObject *identity, Py_hash_t hash, - Py_ssize_t pos) -{ - // return 1 if deleted, 0 if not found - pair_t *pair; - int ret; - int found = 0; - - if (pos >= list->size) { - return 0; - } - - for (; pos 
< list->size; pos++) { - pair = pair_list_get(list, pos); - if (pair->hash != hash) { - continue; - } - ret = str_cmp(pair->identity, identity); - if (ret > 0) { - if (pair_list_del_at(list, pos) < 0) { - return -1; - } - found = 1; - pos--; - } - else if (ret == -1) { - return -1; - } - } - - return found; -} - -static inline int -_pair_list_del_hash(pair_list_t *list, PyObject *identity, - PyObject *key, Py_hash_t hash) -{ - int ret = _pair_list_drop_tail(list, identity, hash, 0); - - if (ret < 0) { - return -1; - } - else if (ret == 0) { - PyErr_SetObject(PyExc_KeyError, key); - return -1; - } - else { - list->version = NEXT_VERSION(); - return 0; - } -} - - -static inline int -pair_list_del(pair_list_t *list, PyObject *key) -{ - PyObject *identity = NULL; - Py_hash_t hash; - int ret; - - identity = list->calc_identity(key); - if (identity == NULL) { - goto fail; - } - - hash = PyObject_Hash(identity); - if (hash == -1) { - goto fail; - } - - ret = _pair_list_del_hash(list, identity, key, hash); - Py_DECREF(identity); - return ret; -fail: - Py_XDECREF(identity); - return -1; -} - - -static inline uint64_t -pair_list_version(pair_list_t *list) -{ - return list->version; -} - - -static inline int -_pair_list_next(pair_list_t *list, Py_ssize_t *ppos, PyObject **pidentity, - PyObject **pkey, PyObject **pvalue, Py_hash_t *phash) -{ - pair_t *pair; - - if (*ppos >= list->size) { - return 0; - } - - pair = pair_list_get(list, *ppos); - - if (pidentity) { - *pidentity = pair->identity; - } - if (pkey) { - *pkey = pair->key; - } - if (pvalue) { - *pvalue = pair->value; - } - if (phash) { - *phash = pair->hash; - } - - *ppos += 1; - return 1; -} - - -static inline int -pair_list_next(pair_list_t *list, Py_ssize_t *ppos, PyObject **pidentity, - PyObject **pkey, PyObject **pvalue) -{ - Py_hash_t hash; - return _pair_list_next(list, ppos, pidentity, pkey, pvalue, &hash); -} - - -static inline int -pair_list_contains(pair_list_t *list, PyObject *key) -{ - Py_hash_t hash1, hash2; - Py_ssize_t pos = 0; - PyObject *ident = NULL; - PyObject *identity = NULL; - int tmp; - - ident = list->calc_identity(key); - if (ident == NULL) { - goto fail; - } - - hash1 = PyObject_Hash(ident); - if (hash1 == -1) { - goto fail; - } - - while (_pair_list_next(list, &pos, &identity, NULL, NULL, &hash2)) { - if (hash1 != hash2) { - continue; - } - tmp = str_cmp(ident, identity); - if (tmp > 0) { - Py_DECREF(ident); - return 1; - } - else if (tmp < 0) { - goto fail; - } - } - - Py_DECREF(ident); - return 0; -fail: - Py_XDECREF(ident); - return -1; -} - - -static inline PyObject * -pair_list_get_one(pair_list_t *list, PyObject *key) -{ - Py_hash_t hash1, hash2; - Py_ssize_t pos = 0; - PyObject *ident = NULL; - PyObject *identity = NULL; - PyObject *value = NULL; - int tmp; - - ident = list->calc_identity(key); - if (ident == NULL) { - goto fail; - } - - hash1 = PyObject_Hash(ident); - if (hash1 == -1) { - goto fail; - } - - while (_pair_list_next(list, &pos, &identity, NULL, &value, &hash2)) { - if (hash1 != hash2) { - continue; - } - tmp = str_cmp(ident, identity); - if (tmp > 0) { - Py_INCREF(value); - Py_DECREF(ident); - return value; - } - else if (tmp < 0) { - goto fail; - } - } - - Py_DECREF(ident); - PyErr_SetObject(PyExc_KeyError, key); - return NULL; -fail: - Py_XDECREF(ident); - return NULL; -} - - -static inline PyObject * -pair_list_get_all(pair_list_t *list, PyObject *key) -{ - Py_hash_t hash1, hash2; - Py_ssize_t pos = 0; - PyObject *ident = NULL; - PyObject *identity = NULL; - PyObject *value = NULL; - 
PyObject *res = NULL; - int tmp; - - ident = list->calc_identity(key); - if (ident == NULL) { - goto fail; - } - - hash1 = PyObject_Hash(ident); - if (hash1 == -1) { - goto fail; - } - - while (_pair_list_next(list, &pos, &identity, NULL, &value, &hash2)) { - if (hash1 != hash2) { - continue; - } - tmp = str_cmp(ident, identity); - if (tmp > 0) { - if (res == NULL) { - res = PyList_New(1); - if (res == NULL) { - goto fail; - } - if (PyList_SetItem(res, 0, value) < 0) { - goto fail; - } - Py_INCREF(value); - } - else if (PyList_Append(res, value) < 0) { - goto fail; - } - } - else if (tmp < 0) { - goto fail; - } - } - - if (res == NULL) { - PyErr_SetObject(PyExc_KeyError, key); - } - Py_DECREF(ident); - return res; - -fail: - Py_XDECREF(ident); - Py_XDECREF(res); - return NULL; -} - - -static inline PyObject * -pair_list_set_default(pair_list_t *list, PyObject *key, PyObject *value) -{ - Py_hash_t hash1, hash2; - Py_ssize_t pos = 0; - PyObject *ident = NULL; - PyObject *identity = NULL; - PyObject *value2 = NULL; - int tmp; - - ident = list->calc_identity(key); - if (ident == NULL) { - goto fail; - } - - hash1 = PyObject_Hash(ident); - if (hash1 == -1) { - goto fail; - } - - while (_pair_list_next(list, &pos, &identity, NULL, &value2, &hash2)) { - if (hash1 != hash2) { - continue; - } - tmp = str_cmp(ident, identity); - if (tmp > 0) { - Py_INCREF(value2); - Py_DECREF(ident); - return value2; - } - else if (tmp < 0) { - goto fail; - } - } - - if (_pair_list_add_with_hash(list, ident, key, value, hash1) < 0) { - goto fail; - } - - Py_INCREF(value); - Py_DECREF(ident); - return value; -fail: - Py_XDECREF(ident); - return NULL; -} - - -static inline PyObject * -pair_list_pop_one(pair_list_t *list, PyObject *key) -{ - pair_t *pair; - - Py_hash_t hash; - Py_ssize_t pos; - PyObject *value = NULL; - int tmp; - PyObject *ident = NULL; - - ident = list->calc_identity(key); - if (ident == NULL) { - goto fail; - } - - hash = PyObject_Hash(ident); - if (hash == -1) { - goto fail; - } - - for (pos=0; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - if (pair->hash != hash) { - continue; - } - tmp = str_cmp(ident, pair->identity); - if (tmp > 0) { - value = pair->value; - Py_INCREF(value); - if (pair_list_del_at(list, pos) < 0) { - goto fail; - } - Py_DECREF(ident); - return value; - } - else if (tmp < 0) { - goto fail; - } - } - - PyErr_SetObject(PyExc_KeyError, key); - goto fail; - -fail: - Py_XDECREF(value); - Py_XDECREF(ident); - return NULL; -} - - -static inline PyObject * -pair_list_pop_all(pair_list_t *list, PyObject *key) -{ - Py_hash_t hash; - Py_ssize_t pos; - pair_t *pair; - int tmp; - PyObject *res = NULL; - PyObject *ident = NULL; - - ident = list->calc_identity(key); - if (ident == NULL) { - goto fail; - } - - hash = PyObject_Hash(ident); - if (hash == -1) { - goto fail; - } - - if (list->size == 0) { - PyErr_SetObject(PyExc_KeyError, ident); - goto fail; - } - - for (pos = list->size - 1; pos >= 0; pos--) { - pair = pair_list_get(list, pos); - if (hash != pair->hash) { - continue; - } - tmp = str_cmp(ident, pair->identity); - if (tmp > 0) { - if (res == NULL) { - res = PyList_New(1); - if (res == NULL) { - goto fail; - } - if (PyList_SetItem(res, 0, pair->value) < 0) { - goto fail; - } - Py_INCREF(pair->value); - } else if (PyList_Append(res, pair->value) < 0) { - goto fail; - } - if (pair_list_del_at(list, pos) < 0) { - goto fail; - } - } - else if (tmp < 0) { - goto fail; - } - } - - if (res == NULL) { - PyErr_SetObject(PyExc_KeyError, key); - } else if (PyList_Reverse(res) 
< 0) { - goto fail; - } - Py_DECREF(ident); - return res; - -fail: - Py_XDECREF(ident); - Py_XDECREF(res); - return NULL; -} - - -static inline PyObject * -pair_list_pop_item(pair_list_t *list) -{ - PyObject *ret; - pair_t *pair; - - if (list->size == 0) { - PyErr_SetString(PyExc_KeyError, "empty multidict"); - return NULL; - } - - pair = pair_list_get(list, 0); - ret = PyTuple_Pack(2, pair->key, pair->value); - if (ret == NULL) { - return NULL; - } - - if (pair_list_del_at(list, 0) < 0) { - Py_DECREF(ret); - return NULL; - } - - return ret; -} - - -static inline int -pair_list_replace(pair_list_t *list, PyObject * key, PyObject *value) -{ - pair_t *pair; - - Py_ssize_t pos; - int tmp; - int found = 0; - - PyObject *identity = NULL; - Py_hash_t hash; - - identity = list->calc_identity(key); - if (identity == NULL) { - goto fail; - } - - hash = PyObject_Hash(identity); - if (hash == -1) { - goto fail; - } - - - for (pos = 0; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - if (hash != pair->hash) { - continue; - } - tmp = str_cmp(identity, pair->identity); - if (tmp > 0) { - found = 1; - Py_INCREF(key); - Py_DECREF(pair->key); - pair->key = key; - Py_INCREF(value); - Py_DECREF(pair->value); - pair->value = value; - break; - } - else if (tmp < 0) { - goto fail; - } - } - - if (!found) { - if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { - goto fail; - } - Py_DECREF(identity); - return 0; - } - else { - list->version = NEXT_VERSION(); - if (_pair_list_drop_tail(list, identity, hash, pos+1) < 0) { - goto fail; - } - Py_DECREF(identity); - return 0; - } -fail: - Py_XDECREF(identity); - return -1; -} - - -static inline int -_dict_set_number(PyObject *dict, PyObject *key, Py_ssize_t num) -{ - PyObject *tmp = PyLong_FromSsize_t(num); - if (tmp == NULL) { - return -1; - } - - if (PyDict_SetItem(dict, key, tmp) < 0) { - Py_DECREF(tmp); - return -1; - } - - return 0; -} - - -static inline int -_pair_list_post_update(pair_list_t *list, PyObject* used_keys, Py_ssize_t pos) -{ - pair_t *pair; - PyObject *tmp; - Py_ssize_t num; - - for (; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - tmp = PyDict_GetItem(used_keys, pair->identity); - if (tmp == NULL) { - // not found - continue; - } - - num = PyLong_AsSsize_t(tmp); - if (num == -1) { - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_RuntimeError, "invalid internal state"); - } - return -1; - } - - if (pos >= num) { - // del self[pos] - if (pair_list_del_at(list, pos) < 0) { - return -1; - } - pos--; - } - } - - list->version = NEXT_VERSION(); - return 0; -} - -// TODO: need refactoring function name -static inline int -_pair_list_update(pair_list_t *list, PyObject *key, - PyObject *value, PyObject *used_keys, - PyObject *identity, Py_hash_t hash) -{ - PyObject *item = NULL; - pair_t *pair = NULL; - Py_ssize_t pos; - int found; - int ident_cmp_res; - - item = PyDict_GetItem(used_keys, identity); - if (item == NULL) { - pos = 0; - } - else { - pos = PyLong_AsSsize_t(item); - if (pos == -1) { - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_RuntimeError, "invalid internal state"); - } - return -1; - } - } - - found = 0; - for (; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - if (pair->hash != hash) { - continue; - } - - ident_cmp_res = str_cmp(pair->identity, identity); - if (ident_cmp_res > 0) { - Py_INCREF(key); - Py_DECREF(pair->key); - pair->key = key; - - Py_INCREF(value); - Py_DECREF(pair->value); - pair->value = value; - - if (_dict_set_number(used_keys, pair->identity, pos + 
1) < 0) { - return -1; - } - - found = 1; - break; - } - else if (ident_cmp_res < 0) { - return -1; - } - } - - if (!found) { - if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { - return -1; - } - if (_dict_set_number(used_keys, identity, list->size) < 0) { - return -1; - } - } - - return 0; -} - - -static inline int -pair_list_update(pair_list_t *list, pair_list_t *other) -{ - PyObject *used_keys = NULL; - pair_t *pair = NULL; - - Py_ssize_t pos; - - if (other->size == 0) { - return 0; - } - - used_keys = PyDict_New(); - if (used_keys == NULL) { - return -1; - } - - for (pos = 0; pos < other->size; pos++) { - pair = pair_list_get(other, pos); - if (_pair_list_update(list, pair->key, pair->value, used_keys, - pair->identity, pair->hash) < 0) { - goto fail; - } - } - - if (_pair_list_post_update(list, used_keys, 0) < 0) { - goto fail; - } - - Py_DECREF(used_keys); - return 0; - -fail: - Py_XDECREF(used_keys); - return -1; -} - - -static inline int -pair_list_update_from_seq(pair_list_t *list, PyObject *seq) -{ - PyObject *it = NULL; // iter(seq) - PyObject *fast = NULL; // item as a 2-tuple or 2-list - PyObject *item = NULL; // seq[i] - PyObject *used_keys = NULL; // dict() - - PyObject *key = NULL; - PyObject *value = NULL; - PyObject *identity = NULL; - - Py_hash_t hash; - - Py_ssize_t i; - Py_ssize_t n; - - it = PyObject_GetIter(seq); - if (it == NULL) { - return -1; - } - - used_keys = PyDict_New(); - if (used_keys == NULL) { - goto fail_1; - } - - for (i = 0; ; ++i) { // i - index into seq of current element - fast = NULL; - item = PyIter_Next(it); - if (item == NULL) { - if (PyErr_Occurred()) { - goto fail_1; - } - break; - } - - // Convert item to sequence, and verify length 2. - fast = PySequence_Fast(item, ""); - if (fast == NULL) { - if (PyErr_ExceptionMatches(PyExc_TypeError)) { - PyErr_Format(PyExc_TypeError, - "multidict cannot convert sequence element #%zd" - " to a sequence", - i); - } - goto fail_1; - } - - n = PySequence_Fast_GET_SIZE(fast); - if (n != 2) { - PyErr_Format(PyExc_ValueError, - "multidict update sequence element #%zd " - "has length %zd; 2 is required", - i, n); - goto fail_1; - } - - key = PySequence_Fast_GET_ITEM(fast, 0); - value = PySequence_Fast_GET_ITEM(fast, 1); - Py_INCREF(key); - Py_INCREF(value); - - identity = list->calc_identity(key); - if (identity == NULL) { - goto fail_1; - } - - hash = PyObject_Hash(identity); - if (hash == -1) { - goto fail_1; - } - - if (_pair_list_update(list, key, value, used_keys, identity, hash) < 0) { - goto fail_1; - } - - Py_DECREF(key); - Py_DECREF(value); - Py_DECREF(fast); - Py_DECREF(item); - Py_DECREF(identity); - } - - if (_pair_list_post_update(list, used_keys, 0) < 0) { - goto fail_2; - } - - Py_DECREF(it); - Py_DECREF(used_keys); - return 0; - -fail_1: - Py_XDECREF(key); - Py_XDECREF(value); - Py_XDECREF(fast); - Py_XDECREF(item); - Py_XDECREF(identity); - -fail_2: - Py_XDECREF(it); - Py_XDECREF(used_keys); - return -1; -} - -static inline int -pair_list_eq_to_mapping(pair_list_t *list, PyObject *other) -{ - PyObject *key = NULL; - PyObject *avalue = NULL; - PyObject *bvalue; - - Py_ssize_t pos, other_len; - - int eq; - - if (!PyMapping_Check(other)) { - PyErr_Format(PyExc_TypeError, - "other argument must be a mapping, not %s", - Py_TYPE(other)->tp_name); - return -1; - } - - other_len = PyMapping_Size(other); - if (other_len < 0) { - return -1; - } - if (pair_list_len(list) != other_len) { - return 0; - } - - pos = 0; - while (pair_list_next(list, &pos, NULL, &key, &avalue)) { - bvalue = 
PyObject_GetItem(other, key); - if (bvalue == NULL) { - if (PyErr_ExceptionMatches(PyExc_KeyError)) { - PyErr_Clear(); - return 0; - } - return -1; - } - - eq = PyObject_RichCompareBool(avalue, bvalue, Py_EQ); - Py_DECREF(bvalue); - - if (eq <= 0) { - return eq; - } - } - - return 1; -} - - -/***********************************************************************/ - -static inline int -pair_list_traverse(pair_list_t *list, visitproc visit, void *arg) -{ - pair_t *pair = NULL; - Py_ssize_t pos; - - for (pos = 0; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - // Don't need traverse the identity: it is a terminal - Py_VISIT(pair->key); - Py_VISIT(pair->value); - } - - return 0; -} - - -static inline int -pair_list_clear(pair_list_t *list) -{ - pair_t *pair = NULL; - Py_ssize_t pos; - - if (list->size == 0) { - return 0; - } - - list->version = NEXT_VERSION(); - for (pos = 0; pos < list->size; pos++) { - pair = pair_list_get(list, pos); - Py_CLEAR(pair->key); - Py_CLEAR(pair->identity); - Py_CLEAR(pair->value); - } - list->size = 0; - if (list->pairs != list->buffer) { - PyMem_Del(list->pairs); - list->pairs = list->buffer; - } - - return 0; -} - - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/multidict/multidict/_multilib/views.h b/third_party/python/multidict/multidict/_multilib/views.h deleted file mode 100644 index 5b1ebfe77cf0..000000000000 --- a/third_party/python/multidict/multidict/_multilib/views.h +++ /dev/null @@ -1,464 +0,0 @@ -#ifndef _MULTIDICT_VIEWS_H -#define _MULTIDICT_VIEWS_H - -#ifdef __cplusplus -extern "C" { -#endif - -static PyTypeObject multidict_itemsview_type; -static PyTypeObject multidict_valuesview_type; -static PyTypeObject multidict_keysview_type; - -static PyObject *viewbaseset_richcmp_func; -static PyObject *viewbaseset_and_func; -static PyObject *viewbaseset_or_func; -static PyObject *viewbaseset_sub_func; -static PyObject *viewbaseset_xor_func; - -static PyObject *abc_itemsview_register_func; -static PyObject *abc_keysview_register_func; -static PyObject *abc_valuesview_register_func; - -static PyObject *itemsview_isdisjoint_func; -static PyObject *itemsview_repr_func; - -static PyObject *keysview_repr_func; -static PyObject *keysview_isdisjoint_func; - -static PyObject *valuesview_repr_func; - -typedef struct { - PyObject_HEAD - PyObject *md; -} _Multidict_ViewObject; - - -/********** Base **********/ - -static inline void -_init_view(_Multidict_ViewObject *self, PyObject *md) -{ - Py_INCREF(md); - self->md = md; -} - -static inline void -multidict_view_dealloc(_Multidict_ViewObject *self) -{ - PyObject_GC_UnTrack(self); - Py_XDECREF(self->md); - PyObject_GC_Del(self); -} - -static inline int -multidict_view_traverse(_Multidict_ViewObject *self, visitproc visit, void *arg) -{ - Py_VISIT(self->md); - return 0; -} - -static inline int -multidict_view_clear(_Multidict_ViewObject *self) -{ - Py_CLEAR(self->md); - return 0; -} - -static inline Py_ssize_t -multidict_view_len(_Multidict_ViewObject *self) -{ - return pair_list_len(&((MultiDictObject*)self->md)->pairs); -} - -static inline PyObject * -multidict_view_richcompare(PyObject *self, PyObject *other, int op) -{ - PyObject *ret; - PyObject *op_obj = PyLong_FromLong(op); - if (op_obj == NULL) { - return NULL; - } - ret = PyObject_CallFunctionObjArgs( - viewbaseset_richcmp_func, self, other, op_obj, NULL); - Py_DECREF(op_obj); - return ret; -} - -static inline PyObject * -multidict_view_and(PyObject *self, PyObject *other) -{ - return PyObject_CallFunctionObjArgs( - 
viewbaseset_and_func, self, other, NULL); -} - -static inline PyObject * -multidict_view_or(PyObject *self, PyObject *other) -{ - return PyObject_CallFunctionObjArgs( - viewbaseset_or_func, self, other, NULL); -} - -static inline PyObject * -multidict_view_sub(PyObject *self, PyObject *other) -{ - return PyObject_CallFunctionObjArgs( - viewbaseset_sub_func, self, other, NULL); -} - -static inline PyObject * -multidict_view_xor(PyObject *self, PyObject *other) -{ - return PyObject_CallFunctionObjArgs( - viewbaseset_xor_func, self, other, NULL); -} - -static PyNumberMethods multidict_view_as_number = { - .nb_subtract = (binaryfunc)multidict_view_sub, - .nb_and = (binaryfunc)multidict_view_and, - .nb_xor = (binaryfunc)multidict_view_xor, - .nb_or = (binaryfunc)multidict_view_or, -}; - -/********** Items **********/ - -static inline PyObject * -multidict_itemsview_new(PyObject *md) -{ - _Multidict_ViewObject *mv = PyObject_GC_New( - _Multidict_ViewObject, &multidict_itemsview_type); - if (mv == NULL) { - return NULL; - } - - _init_view(mv, md); - - PyObject_GC_Track(mv); - return (PyObject *)mv; -} - -static inline PyObject * -multidict_itemsview_iter(_Multidict_ViewObject *self) -{ - return multidict_items_iter_new((MultiDictObject*)self->md); -} - -static inline PyObject * -multidict_itemsview_repr(_Multidict_ViewObject *self) -{ - return PyObject_CallFunctionObjArgs( - itemsview_repr_func, self, NULL); -} - -static inline PyObject * -multidict_itemsview_isdisjoint(_Multidict_ViewObject *self, PyObject *other) -{ - return PyObject_CallFunctionObjArgs( - itemsview_isdisjoint_func, self, other, NULL); -} - -PyDoc_STRVAR(itemsview_isdisjoint_doc, - "Return True if two sets have a null intersection."); - -static PyMethodDef multidict_itemsview_methods[] = { - { - "isdisjoint", - (PyCFunction)multidict_itemsview_isdisjoint, - METH_O, - itemsview_isdisjoint_doc - }, - { - NULL, - NULL - } /* sentinel */ -}; - -static inline int -multidict_itemsview_contains(_Multidict_ViewObject *self, PyObject *obj) -{ - PyObject *akey = NULL, - *aval = NULL, - *bkey = NULL, - *bval = NULL, - *iter = NULL, - *item = NULL; - int ret1, ret2; - - if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 2) { - return 0; - } - - bkey = PyTuple_GET_ITEM(obj, 0); - bval = PyTuple_GET_ITEM(obj, 1); - - iter = multidict_itemsview_iter(self); - if (iter == NULL) { - return 0; - } - - while ((item = PyIter_Next(iter)) != NULL) { - akey = PyTuple_GET_ITEM(item, 0); - aval = PyTuple_GET_ITEM(item, 1); - - ret1 = PyObject_RichCompareBool(akey, bkey, Py_EQ); - if (ret1 < 0) { - Py_DECREF(iter); - Py_DECREF(item); - return -1; - } - ret2 = PyObject_RichCompareBool(aval, bval, Py_EQ); - if (ret2 < 0) { - Py_DECREF(iter); - Py_DECREF(item); - return -1; - } - if (ret1 > 0 && ret2 > 0) - { - Py_DECREF(iter); - Py_DECREF(item); - return 1; - } - - Py_DECREF(item); - } - - Py_DECREF(iter); - - if (PyErr_Occurred()) { - return -1; - } - - return 0; -} - -static PySequenceMethods multidict_itemsview_as_sequence = { - .sq_length = (lenfunc)multidict_view_len, - .sq_contains = (objobjproc)multidict_itemsview_contains, -}; - -static PyTypeObject multidict_itemsview_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict._ItemsView", /* tp_name */ - sizeof(_Multidict_ViewObject), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_view_dealloc, - .tp_repr = (reprfunc)multidict_itemsview_repr, - .tp_as_number = &multidict_view_as_number, - .tp_as_sequence = &multidict_itemsview_as_sequence, - .tp_getattro 
= PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)multidict_view_traverse, - .tp_clear = (inquiry)multidict_view_clear, - .tp_richcompare = multidict_view_richcompare, - .tp_iter = (getiterfunc)multidict_itemsview_iter, - .tp_methods = multidict_itemsview_methods, -}; - - -/********** Keys **********/ - -static inline PyObject * -multidict_keysview_new(PyObject *md) -{ - _Multidict_ViewObject *mv = PyObject_GC_New( - _Multidict_ViewObject, &multidict_keysview_type); - if (mv == NULL) { - return NULL; - } - - _init_view(mv, md); - - PyObject_GC_Track(mv); - return (PyObject *)mv; -} - -static inline PyObject * -multidict_keysview_iter(_Multidict_ViewObject *self) -{ - return multidict_keys_iter_new(((MultiDictObject*)self->md)); -} - -static inline PyObject * -multidict_keysview_repr(_Multidict_ViewObject *self) -{ - return PyObject_CallFunctionObjArgs( - keysview_repr_func, self, NULL); -} - -static inline PyObject * -multidict_keysview_isdisjoint(_Multidict_ViewObject *self, PyObject *other) -{ - return PyObject_CallFunctionObjArgs( - keysview_isdisjoint_func, self, other, NULL); -} - -PyDoc_STRVAR(keysview_isdisjoint_doc, - "Return True if two sets have a null intersection."); - -static PyMethodDef multidict_keysview_methods[] = { - { - "isdisjoint", - (PyCFunction)multidict_keysview_isdisjoint, - METH_O, - keysview_isdisjoint_doc - }, - { - NULL, - NULL - } /* sentinel */ -}; - -static inline int -multidict_keysview_contains(_Multidict_ViewObject *self, PyObject *key) -{ - return pair_list_contains(&((MultiDictObject*)self->md)->pairs, key); -} - -static PySequenceMethods multidict_keysview_as_sequence = { - .sq_length = (lenfunc)multidict_view_len, - .sq_contains = (objobjproc)multidict_keysview_contains, -}; - -static PyTypeObject multidict_keysview_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict._KeysView", /* tp_name */ - sizeof(_Multidict_ViewObject), /* tp_basicsize */ - .tp_dealloc = (destructor)multidict_view_dealloc, - .tp_repr = (reprfunc)multidict_keysview_repr, - .tp_as_number = &multidict_view_as_number, - .tp_as_sequence = &multidict_keysview_as_sequence, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)multidict_view_traverse, - .tp_clear = (inquiry)multidict_view_clear, - .tp_richcompare = multidict_view_richcompare, - .tp_iter = (getiterfunc)multidict_keysview_iter, - .tp_methods = multidict_keysview_methods, -}; - - -/********** Values **********/ - -static inline PyObject * -multidict_valuesview_new(PyObject *md) -{ - _Multidict_ViewObject *mv = PyObject_GC_New( - _Multidict_ViewObject, &multidict_valuesview_type); - if (mv == NULL) { - return NULL; - } - - _init_view(mv, md); - - PyObject_GC_Track(mv); - return (PyObject *)mv; -} - -static inline PyObject * -multidict_valuesview_iter(_Multidict_ViewObject *self) -{ - return multidict_values_iter_new(((MultiDictObject*)self->md)); -} - -static inline PyObject * -multidict_valuesview_repr(_Multidict_ViewObject *self) -{ - return PyObject_CallFunctionObjArgs( - valuesview_repr_func, self, NULL); -} - -static PySequenceMethods multidict_valuesview_as_sequence = { - .sq_length = (lenfunc)multidict_view_len, -}; - -static PyTypeObject multidict_valuesview_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "multidict._multidict._ValuesView", /* tp_name */ - sizeof(_Multidict_ViewObject), /* tp_basicsize */ - .tp_dealloc = 
(destructor)multidict_view_dealloc, - .tp_repr = (reprfunc)multidict_valuesview_repr, - .tp_as_sequence = &multidict_valuesview_as_sequence, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)multidict_view_traverse, - .tp_clear = (inquiry)multidict_view_clear, - .tp_iter = (getiterfunc)multidict_valuesview_iter, -}; - - -static inline int -multidict_views_init() -{ - PyObject *reg_func_call_result = NULL; - PyObject *module = PyImport_ImportModule("multidict._multidict_base"); - if (module == NULL) { - goto fail; - } - -#define GET_MOD_ATTR(VAR, NAME) \ - VAR = PyObject_GetAttrString(module, NAME); \ - if (VAR == NULL) { \ - goto fail; \ - } - - GET_MOD_ATTR(viewbaseset_richcmp_func, "_viewbaseset_richcmp"); - GET_MOD_ATTR(viewbaseset_and_func, "_viewbaseset_and"); - GET_MOD_ATTR(viewbaseset_or_func, "_viewbaseset_or"); - GET_MOD_ATTR(viewbaseset_sub_func, "_viewbaseset_sub"); - GET_MOD_ATTR(viewbaseset_xor_func, "_viewbaseset_xor"); - - GET_MOD_ATTR(abc_itemsview_register_func, "_abc_itemsview_register"); - GET_MOD_ATTR(abc_keysview_register_func, "_abc_keysview_register"); - GET_MOD_ATTR(abc_valuesview_register_func, "_abc_valuesview_register"); - - GET_MOD_ATTR(itemsview_repr_func, "_itemsview_isdisjoint"); - GET_MOD_ATTR(itemsview_repr_func, "_itemsview_repr"); - - GET_MOD_ATTR(keysview_repr_func, "_keysview_repr"); - GET_MOD_ATTR(keysview_isdisjoint_func, "_keysview_isdisjoint"); - - GET_MOD_ATTR(valuesview_repr_func, "_valuesview_repr"); - - if (PyType_Ready(&multidict_itemsview_type) < 0 || - PyType_Ready(&multidict_valuesview_type) < 0 || - PyType_Ready(&multidict_keysview_type) < 0) - { - goto fail; - } - - // abc.ItemsView.register(_ItemsView) - reg_func_call_result = PyObject_CallFunctionObjArgs( - abc_itemsview_register_func, (PyObject*)&multidict_itemsview_type, NULL); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - // abc.KeysView.register(_KeysView) - reg_func_call_result = PyObject_CallFunctionObjArgs( - abc_keysview_register_func, (PyObject*)&multidict_keysview_type, NULL); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - // abc.ValuesView.register(_KeysView) - reg_func_call_result = PyObject_CallFunctionObjArgs( - abc_valuesview_register_func, (PyObject*)&multidict_valuesview_type, NULL); - if (reg_func_call_result == NULL) { - goto fail; - } - Py_DECREF(reg_func_call_result); - - Py_DECREF(module); - return 0; - -fail: - Py_CLEAR(module); - return -1; - -#undef GET_MOD_ATTR -} - -#ifdef __cplusplus -} -#endif -#endif diff --git a/third_party/python/multidict/multidict/py.typed b/third_party/python/multidict/multidict/py.typed deleted file mode 100644 index dfe8cc048e71..000000000000 --- a/third_party/python/multidict/multidict/py.typed +++ /dev/null @@ -1 +0,0 @@ -PEP-561 marker. 
\ No newline at end of file diff --git a/third_party/python/multidict/pyproject.toml b/third_party/python/multidict/pyproject.toml deleted file mode 100644 index f1b83b8f62fd..000000000000 --- a/third_party/python/multidict/pyproject.toml +++ /dev/null @@ -1,11 +0,0 @@ -[build-system] -requires = ["setuptools>=40", "wheel"] - - -[tool.towncrier] -package = "multidict" -filename = "CHANGES.rst" -directory = "CHANGES/" -title_format = "{version} ({project_date})" -template = "CHANGES/.TEMPLATE.rst" -issue_format = "`#{issue} `_" diff --git a/third_party/python/multidict/setup.cfg b/third_party/python/multidict/setup.cfg deleted file mode 100644 index 2c11fd4aedbb..000000000000 --- a/third_party/python/multidict/setup.cfg +++ /dev/null @@ -1,37 +0,0 @@ -[aliases] -test = pytest - -[metadata] -license_files = - LICENSE -long_description = file: README.rst - -[flake8] -ignore = E302,E701,E305,E704,F811,N811, W503 -max-line-length = 88 - -[isort] -multi_line_output = 3 -include_trailing_comma = True -force_grid_wrap = 0 -use_parentheses = True -known_first_party = multidict -known_third_party = pytest - -[tool:pytest] -testpaths = tests -norecursedirs = dist build .tox docs requirements tools -addopts = --doctest-modules --cov=multidict --cov-report term-missing:skip-covered --cov-report xml --junitxml=junit-test-results.xml -v -doctest_optionflags = ALLOW_UNICODE ELLIPSIS -junit_family = xunit2 - -[mypy-pytest] -ignore_missing_imports = true - -[mypy-multidict._multidict] -ignore_missing_imports = true - -[egg_info] -tag_build = -tag_date = 0 - diff --git a/third_party/python/multidict/setup.py b/third_party/python/multidict/setup.py deleted file mode 100644 index 044f1d72ed1f..000000000000 --- a/third_party/python/multidict/setup.py +++ /dev/null @@ -1,96 +0,0 @@ -import codecs -import os -import platform -import re -import sys - -from setuptools import Extension, setup - -NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS")) - -if sys.implementation.name != "cpython": - NO_EXTENSIONS = True - -CFLAGS = ["-O2"] -# CFLAGS = ['-g'] -if platform.system() != "Windows": - CFLAGS.extend( - [ - "-std=c99", - "-Wall", - "-Wsign-compare", - "-Wconversion", - "-fno-strict-aliasing", - "-pedantic", - ] - ) - -extensions = [ - Extension( - "multidict._multidict", - ["multidict/_multidict.c"], - extra_compile_args=CFLAGS, - ), -] - - -with codecs.open( - os.path.join( - os.path.abspath(os.path.dirname(__file__)), "multidict", "__init__.py" - ), - "r", - "latin1", -) as fp: - try: - version = re.findall(r'^__version__ = "([^"]+)"\r?$', fp.read(), re.M)[0] - except IndexError: - raise RuntimeError("Unable to determine version.") - - -def read(f): - return open(os.path.join(os.path.dirname(__file__), f)).read().strip() - - -args = dict( - name="multidict", - version=version, - description=("multidict implementation"), - long_description=read("README.rst"), - classifiers=[ - "License :: OSI Approved :: Apache Software License", - "Intended Audience :: Developers", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Development Status :: 5 - Production/Stable", - ], - author="Andrew Svetlov", - author_email="andrew.svetlov@gmail.com", - url="https://github.com/aio-libs/multidict", - project_urls={ - "Chat: Gitter": "https://gitter.im/aio-libs/Lobby", - "CI: Azure Pipelines": 
"https://dev.azure.com/aio-libs/multidict/_build", - "Coverage: codecov": "https://codecov.io/github/aio-libs/multidict", - "Docs: RTD": "https://multidict.readthedocs.io", - "GitHub: issues": "https://github.com/aio-libs/multidict/issues", - "GitHub: repo": "https://github.com/aio-libs/multidict", - }, - license="Apache 2", - packages=["multidict"], - python_requires=">=3.6", - include_package_data=True, -) - -if not NO_EXTENSIONS: - print("*********************") - print("* Accelerated build *") - print("*********************") - setup(ext_modules=extensions, **args) -else: - print("*********************") - print("* Pure Python build *") - print("*********************") - setup(**args) diff --git a/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/DESCRIPTION.rst b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/DESCRIPTION.rst new file mode 100644 index 000000000000..b35a9362835d --- /dev/null +++ b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,47 @@ +The `old pathlib `_ +module on bitbucket is in bugfix-only mode. +The goal of pathlib2 is to provide a backport of +`standard pathlib `_ +module which tracks the standard library module, +so all the newest features of the standard pathlib can be +used also on older Python versions. + +Download +-------- + +Standalone releases are available on PyPI: +http://pypi.python.org/pypi/pathlib2/ + +Development +----------- + +The main development takes place in the Python standard library: see +the `Python developer's guide `_. +In particular, new features should be submitted to the +`Python bug tracker `_. + +Issues that occur in this backport, but that do not occur not in the +standard Python pathlib module can be submitted on +the `pathlib2 bug tracker `_. + +Documentation +------------- + +Refer to the +`standard pathlib `_ +documentation. + +.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop + :target: https://travis-ci.org/mcmtroffaes/pathlib2 + :alt: travis-ci + +.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true + :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2 + :alt: appveyor + +.. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg + :target: https://codecov.io/gh/mcmtroffaes/pathlib2 + :alt: codecov + + + diff --git a/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/METADATA b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/METADATA new file mode 100644 index 000000000000..e8281c254979 --- /dev/null +++ b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/METADATA @@ -0,0 +1,76 @@ +Metadata-Version: 2.0 +Name: pathlib2 +Version: 2.3.2 +Summary: Object-oriented filesystem paths +Home-page: https://pypi.python.org/pypi/pathlib2/ +Author: Matthias C. M. 
Troffaes +Author-email: matthias.troffaes@gmail.com +License: MIT +Download-URL: https://pypi.python.org/pypi/pathlib2/ +Description-Content-Type: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: System :: Filesystems +Requires-Dist: six +Requires-Dist: scandir; python_version < "3.5" + +The `old pathlib `_ +module on bitbucket is in bugfix-only mode. +The goal of pathlib2 is to provide a backport of +`standard pathlib `_ +module which tracks the standard library module, +so all the newest features of the standard pathlib can be +used also on older Python versions. + +Download +-------- + +Standalone releases are available on PyPI: +http://pypi.python.org/pypi/pathlib2/ + +Development +----------- + +The main development takes place in the Python standard library: see +the `Python developer's guide `_. +In particular, new features should be submitted to the +`Python bug tracker `_. + +Issues that occur in this backport, but that do not occur not in the +standard Python pathlib module can be submitted on +the `pathlib2 bug tracker `_. + +Documentation +------------- + +Refer to the +`standard pathlib `_ +documentation. + +.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop + :target: https://travis-ci.org/mcmtroffaes/pathlib2 + :alt: travis-ci + +.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true + :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2 + :alt: appveyor + +.. 
|codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg + :target: https://codecov.io/gh/mcmtroffaes/pathlib2 + :alt: codecov + + + diff --git a/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/RECORD b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/RECORD new file mode 100644 index 000000000000..46814c1877dd --- /dev/null +++ b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/RECORD @@ -0,0 +1,7 @@ +pathlib2/__init__.py,sha256=r2TnG46R2dTSgCDmYn4UdnhtFnsP_Fk2Ihh3uWpVQ-A,54694 +pathlib2-2.3.2.dist-info/RECORD,, +pathlib2-2.3.2.dist-info/top_level.txt,sha256=tNPkisFiGBFsPUnCIHg62vSFlkx_1NO86Id8lbJmfFQ,9 +pathlib2-2.3.2.dist-info/DESCRIPTION.rst,sha256=UJ43bvYTDNBtKnuWgVYC5HNsuTs98JDirzbt_jtMlNo,1569 +pathlib2-2.3.2.dist-info/metadata.json,sha256=0BNRc6V07cUIZzo9XxBBKgaz36kYVxkA139x-mHEnBA,1275 +pathlib2-2.3.2.dist-info/METADATA,sha256=5t8fstL_kQZjWN3Zu3A9FGGppO8QeQt7fJq1EabuSdI,2707 +pathlib2-2.3.2.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 diff --git a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/WHEEL b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/WHEEL similarity index 70% rename from third_party/python/funcsigs/funcsigs-1.0.2.dist-info/WHEEL rename to third_party/python/pathlib2/pathlib2-2.3.2.dist-info/WHEEL index 8b6dd1b5a884..9dff69d86102 100644 --- a/third_party/python/funcsigs/funcsigs-1.0.2.dist-info/WHEEL +++ b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.24.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/metadata.json b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/metadata.json new file mode 100644 index 000000000000..68120ba57339 --- /dev/null +++ b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "MIT", "name": "pathlib2", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "download_url": "https://pypi.python.org/pypi/pathlib2/", "summary": "Object-oriented filesystem paths", "run_requires": [{"environment": "python_version < \"3.5\"", "requires": ["scandir"]}, {"requires": ["six"]}], "version": "2.3.2", "extensions": {"python.details": {"project_urls": {"Home": "https://pypi.python.org/pypi/pathlib2/"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "matthias.troffaes@gmail.com", "name": "Matthias C. M. 
Troffaes"}]}}, "description_content_type": "UNKNOWN", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries", "Topic :: System :: Filesystems"], "extras": []} \ No newline at end of file diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/top_level.txt b/third_party/python/pathlib2/pathlib2-2.3.2.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/top_level.txt rename to third_party/python/pathlib2/pathlib2-2.3.2.dist-info/top_level.txt diff --git a/third_party/python/pep487/PKG-INFO b/third_party/python/pep487/PKG-INFO new file mode 100644 index 000000000000..c44d702629cf --- /dev/null +++ b/third_party/python/pep487/PKG-INFO @@ -0,0 +1,87 @@ +Metadata-Version: 2.1 +Name: pep487 +Version: 1.0.1 +Summary: PEP487 - Simpler customisation of class creation +Home-page: https://github.com/zaehlwerk/pep487 +Author: Gregor Giesen +Author-email: giesen@zaehlwerk.net +License: GPLv3 +Description: =============================================== + PEP487: Simpler customisation of class creation + =============================================== + + This is a backport of PEP487's simpler customisation of class + creation by Martin Teichmann + for Python versions before 3.6. + + PEP487 is free software: you can redistribute it and/or modify it + under the terms of the GNU General Public License as published + by the Free Software Foundation, either version 3 of the License, + or (at your option) any later version. + + + Subclass init + ============= + + >>> from pep487 import PEP487Object + >>> class FooBase(PEP487Object): + ... foos = set() + ... + ... def __init_subclass__(cls, **kwargs): + ... cls.foos.add(cls.__name__) + + Using `PEP487Object` as base class all subclasses of FooBase + will add their name to the common class variable 'foos'. + + >>> class Foo1(FooBase): + ... pass + >>> class Foo2(FooBase): + ... pass + + Hence: + + >>> FooBase.foos + {'Foo1', 'Foo2'} + + + + Property names and owner + ======================== + + If a class object has a method `__set_name__` upon declaration + of an PEP487Object class, it will be called: + + >>> class NamedProperty: + ... def __set_name__(self, owner, name): + ... self.context = owner + ... self.name = name + + >>> class Bar(PEP487Object): + ... foo = NamedProperty() + ... bar = NamedProperty() + + Consequently: + + >>> Bar.foo.name is 'foo' and Bar.foo.context is Bar + True + >>> Bar.bar.name is 'bar' and Bar.bar.context is Bar + True + + + Abstract base classes + ===================== + + Since `PEP487Object` has a custom metaclass, it is incompatible + to `abc.ABC`. Therefore `pep487` contains patched versions of `ABC` + and `ABCMeta`. 
+ +Keywords: pep487 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+) +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Provides-Extra: test diff --git a/third_party/python/pep487/README.rst b/third_party/python/pep487/README.rst new file mode 100644 index 000000000000..4190c77f79b4 --- /dev/null +++ b/third_party/python/pep487/README.rst @@ -0,0 +1,68 @@ +=============================================== +PEP487: Simpler customisation of class creation +=============================================== + +This is a backport of PEP487's simpler customisation of class +creation by Martin Teichmann +for Python versions before 3.6. + +PEP487 is free software: you can redistribute it and/or modify it +under the terms of the GNU General Public License as published +by the Free Software Foundation, either version 3 of the License, +or (at your option) any later version. + + +Subclass init +============= + +>>> from pep487 import PEP487Object +>>> class FooBase(PEP487Object): +... foos = set() +... +... def __init_subclass__(cls, **kwargs): +... cls.foos.add(cls.__name__) + +Using `PEP487Object` as base class all subclasses of FooBase +will add their name to the common class variable 'foos'. + +>>> class Foo1(FooBase): +... pass +>>> class Foo2(FooBase): +... pass + +Hence: + +>>> FooBase.foos +{'Foo1', 'Foo2'} + + + +Property names and owner +======================== + +If a class object has a method `__set_name__` upon declaration +of an PEP487Object class, it will be called: + +>>> class NamedProperty: +... def __set_name__(self, owner, name): +... self.context = owner +... self.name = name + +>>> class Bar(PEP487Object): +... foo = NamedProperty() +... bar = NamedProperty() + +Consequently: + +>>> Bar.foo.name is 'foo' and Bar.foo.context is Bar +True +>>> Bar.bar.name is 'bar' and Bar.bar.context is Bar +True + + +Abstract base classes +===================== + +Since `PEP487Object` has a custom metaclass, it is incompatible +to `abc.ABC`. Therefore `pep487` contains patched versions of `ABC` +and `ABCMeta`. diff --git a/third_party/python/pep487/lib/pep487/__init__.py b/third_party/python/pep487/lib/pep487/__init__.py new file mode 100644 index 000000000000..1bfcec003629 --- /dev/null +++ b/third_party/python/pep487/lib/pep487/__init__.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2017 by Gregor Giesen +# +# This is a backport of PEP487's simpler customisation of class +# creation by Martin Teichmann +# for Python versions before 3.6. +# +# PEP487 is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, +# or (at your option) any later version. +# +# PEP487 is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with PEP487. If not, see . 
+# +"""pep487.py: Simpler customisation of class creation""" + +import abc +import sys +import types + +__all__ = ('PEP487Meta', 'PEP487Object', 'ABCMeta', 'ABC') + +HAS_PY36 = sys.version_info >= (3, 6) +HAS_PEP487 = HAS_PY36 + +if HAS_PEP487: + PEP487Meta = type # pragma: no cover + ABCMeta = abc.ABCMeta # pragma: no cover + ABC = abc.ABC # pragma: no cover + PEP487Base = object # pragma: no cover + PEP487Object = object # pragma: no cover +else: + class PEP487Meta(type): + def __new__(mcls, name, bases, ns, **kwargs): + init = ns.get('__init_subclass__') + if isinstance(init, types.FunctionType): + ns['__init_subclass__'] = classmethod(init) + cls = super().__new__(mcls, name, bases, ns) + for key, value in cls.__dict__.items(): + func = getattr(value, '__set_name__', None) + if func is not None: + func(cls, key) + super(cls, cls).__init_subclass__(**kwargs) + return cls + + def __init__(cls, name, bases, ns, **kwargs): + super().__init__(name, bases, ns) + + class ABCMeta(abc.ABCMeta): + def __new__(mcls, name, bases, ns, **kwargs): + init = ns.get('__init_subclass__') + if isinstance(init, types.FunctionType): + ns['__init_subclass__'] = classmethod(init) + cls = super().__new__(mcls, name, bases, ns) + for key, value in cls.__dict__.items(): + func = getattr(value, '__set_name__', None) + if func is not None: + func(cls, key) + super(cls, cls).__init_subclass__(**kwargs) + return cls + + def __init__(cls, name, bases, ns, **kwargs): + super().__init__(name, bases, ns) + + class PEP487Base: + @classmethod + def __init_subclass__(cls, **kwargs): + pass + + class PEP487Object(PEP487Base, metaclass=PEP487Meta): + pass + + class ABC(PEP487Base, metaclass=ABCMeta): + pass diff --git a/third_party/python/pep487/lib/pep487/version.py b/third_party/python/pep487/lib/pep487/version.py new file mode 100644 index 000000000000..02a2605ea5b8 --- /dev/null +++ b/third_party/python/pep487/lib/pep487/version.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2017 by Gregor Giesen +# +# This file is part of PEP487. +# +# PEP487 is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, +# or (at your option) any later version. +# +# PEP487 is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with PEP487. If not, see . +# +"""pep487/version.py: version info""" + +__author__ = "Gregor Giesen" +__copyright__ = "Copyright 2017, Gregor Giesen" +__credits__ = ["Martin Teichmann", "Gregor Giesen"] +__license__ = "GPLv3" +__version__ = "1.0.1" +__maintainer__ = "Gregor Giesen" diff --git a/third_party/python/pep487/setup.cfg b/third_party/python/pep487/setup.cfg new file mode 100644 index 000000000000..f80f35b7f5b4 --- /dev/null +++ b/third_party/python/pep487/setup.cfg @@ -0,0 +1,12 @@ +[bdist_wheel] +universal = 1 + +[aliases] +test = pytest + +[tool:pytest] + +[egg_info] +tag_build = +tag_date = 0 + diff --git a/third_party/python/pep487/setup.py b/third_party/python/pep487/setup.py new file mode 100644 index 000000000000..a3d833202b1f --- /dev/null +++ b/third_party/python/pep487/setup.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2017 by Gregor Giesen +# +# This file is part of PEP487. 
+# +# PEP487 is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, +# or (at your option) any later version. +# +# PEP487 is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with PEP487. If not, see . +# + +# Always prefer setuptools over distutils +from setuptools import setup, find_packages +# To use a consistent encoding +from codecs import open +import re +import os.path + +here = os.path.abspath(os.path.dirname(__file__)) + +version_file = os.path.join(here, 'lib', 'pep487', 'version.py') +with open(version_file, 'rt') as fp: + re_version = re.compile( + r"""^__version__[ ]*=[ ]*["']{1,3}(.+)["']{1,3}$""") + for line in fp: + r = re_version.match(line) + if r is not None: + version = r.group(1) + break + else: + raise RuntimeError("Cannot find version string in %s" % version_file) + +# Get the long description from the README file +with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f: + long_description = f.read() + +setup( + name='pep487', + + # Versions should comply with PEP440. For a discussion on single-sourcing + # the version across setup.py and the project code, see + # https://packaging.python.org/en/latest/single_source_version.html + version=version, + + description='PEP487 - Simpler customisation of class creation', + long_description=long_description, + + # The project's main homepage. + url='https://github.com/zaehlwerk/pep487', + + # Author details + author='Gregor Giesen', + author_email='giesen@zaehlwerk.net', + + # Choose your license + license="GPLv3", + + # See https://pypi.python.org/pypi?%3Aaction=list_classifiers + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + ], + + # What does your project relate to? + keywords='pep487', + + # You can just specify the packages manually here if your project is + # simple. Or you can use find_packages(). + package_dir={'': 'lib'}, + packages=find_packages('lib', exclude=['tests']), + + # List run-time dependencies here. These will be installed by pip when + # your project is installed. For an analysis of "install_requires" vs pip's + # requirements files see: + # https://packaging.python.org/en/latest/requirements.html + install_requires=[], + + # List additional groups of dependencies here (e.g. development + # dependencies). You can install these using the following syntax, + # for example: + # $ pip install -e .[dev,test] + extras_require={ + 'test': ['pytest', + 'pytest-cov', + 'pytest-flakes', + 'pytest-mock', + 'pytest-pep8', + 'pytest-runner'], + }, + + # If there are data files included in your packages that need to be + # installed, specify them here. If using Python 2.6 or less, then these + # have to be included in MANIFEST.in as well. 
+ package_data={}, + + setup_requires=['pytest-runner'], + tests_require=['pytest', + 'pytest-cov', + 'pytest-flakes', + 'pytest-pep8', + 'pytest-mock'], + + # To provide executable scripts, use entry points in preference to the + # "scripts" keyword. Entry points provide cross-platform support and allow + # pip to create the appropriate form of executable for the target platform. + entry_points={}, +) diff --git a/third_party/python/pyasn1-modules/CHANGES.txt b/third_party/python/pyasn1-modules/CHANGES.txt new file mode 100644 index 000000000000..1cc789f7dcc7 --- /dev/null +++ b/third_party/python/pyasn1-modules/CHANGES.txt @@ -0,0 +1,124 @@ + +Revision 0.1.5, released 10-10-2017 +----------------------------------- + +- OCSP response blob fixed in test +- Fixed wrong OCSP ResponderID components tagging + +Revision 0.1.4, released 07-09-2017 +----------------------------------- + +- Typo fixed in the dependency spec + +Revision 0.1.3, released 07-09-2017 +----------------------------------- + +- Apparently, pip>=1.5.6 is still widely used and it is not PEP440 + compliant. Had to replace the `~=` version dependency spec with a + sequence of simple comparisons to remain compatible with the aging pip. + +Revision 0.1.2, released 07-09-2017 +----------------------------------- + +- Pinned to pyasn1 ~0.3.4 + +Revision 0.1.1, released 27-08-2017 +----------------------------------- + +- Tests refactored into proper unit tests +- pem.readBase64fromText() convenience function added +- Pinned to pyasn1 0.3.3 + +Revision 0.0.11, released 04-08-2017 +------------------------------------ + +- Fixed typo in ASN.1 definitions at rfc2315.py + +Revision 0.0.10, released 27-07-2017 +------------------------------------ + +* Fixed SequenceOf initializer to pass now-mandatory componentType + keyword argument (since pyasn1 0.3.1) +* Temporarily fixed recursive ASN.1 type definition to work with + pyasn1 0.3.1+. This is going to be fixed properly shortly. + +Revision 0.0.9, released 01-06-2017 +----------------------------------- + +* More CRL data structures added (RFC3279) +* Added X.509 certificate extensions map +* Added X.509 attribute type map +* Fix to __doc__ use in setup.py to make -O0 installation mode working +* Copyright added to source files +* More PEP-8'ing done on the code +* Author's e-mail changed + +Revision 0.0.8, released 28-09-2015 +----------------------------------- + +- Wheel distribution format now supported +- Fix to misspelled rfc2459.id_at_sutname variable +- Fix to misspelled rfc2459.NameConstraints component tag ID +- Fix to misspelled rfc2459.GeneralSubtree component default status + +Revision 0.0.7, released 01-08-2015 +----------------------------------- + +- Extensions added to text files, CVS attic flushed. +- Fix to rfc2459.BasicConstraints syntax. + +Revision 0.0.6, released 21-06-2015 +----------------------------------- + +- Typo fix to id_kp_serverAuth object value +- A test case for indefinite length encoding eliminated as it's + forbidden in DER. + +Revision 0.0.5 +-------------- + +- License updated to vanilla BSD 2-Clause to ease package use + (http://opensource.org/licenses/BSD-2-Clause). +- Missing components added to rfc4210.PKIBody. +- Fix to rfc2459.CRLDistPointsSyntax typo. +- Fix to rfc2511.CertReqMsg typo. + +Revision 0.0.4 +-------------- + +- CMP structures (RFC4210), cmpdump.py tool and test case added. +- SNMPv2c Message syntax (RFC1901) properly defined. +- Package version established in form of __init__.__version__ + which is in-sync with distutils. 
+- Package meta information and classifiers updated. + +Revision 0.0.3 +-------------- + +- Text cases implemented +- X.509 CRMF structures (RFC2511) and crmfdump.py tool added +- X.509 CRL structures and crldump.py tool added +- PKCS#10 structures and pkcs10dump.py tool added +- PKCS#8 structures and pkcs8dump.py tool added +- PKCS#1 (rfc3447) structures added +- OCSP request & response dumping tool added +- SNMPv2c & SNMPv3/USM structures added +- keydump.py moved into pkcs1dump.py +- PEM files read function generalized to be used more universally. +- complete PKIX1 '88 code implemented at rfc2459.py + + +Revision 0.0.2 +-------------- + +- Require pyasn1 >= 0.1.1 +- Fixes towards Py3K compatibility + + use either of existing urllib module + + adopt to the new bytes type + + print operator is now a function + + new exception syntax + +Revision 0.0.1a +--------------- + +- Initial revision, most code carried from pyasn1 examples. diff --git a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/LICENSE.txt b/third_party/python/pyasn1-modules/LICENSE.txt similarity index 95% rename from third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/LICENSE.txt rename to third_party/python/pyasn1-modules/LICENSE.txt index ac630e821cbb..02b45c430c71 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/LICENSE.txt +++ b/third_party/python/pyasn1-modules/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2005-2019, Ilya Etingof +Copyright (c) 2005-2017, Ilya Etingof All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/third_party/python/pyasn1-modules/MANIFEST.in b/third_party/python/pyasn1-modules/MANIFEST.in new file mode 100644 index 000000000000..57135d7cec8b --- /dev/null +++ b/third_party/python/pyasn1-modules/MANIFEST.in @@ -0,0 +1,4 @@ +include *.txt *.md +recursive-include tools *.py +recursive-include tests *.py +prune doc/build diff --git a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/METADATA b/third_party/python/pyasn1-modules/PKG-INFO similarity index 76% rename from third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/METADATA rename to third_party/python/pyasn1-modules/PKG-INFO index 52a16232725e..5b88f044088b 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/METADATA +++ b/third_party/python/pyasn1-modules/PKG-INFO @@ -1,12 +1,12 @@ -Metadata-Version: 2.1 +Metadata-Version: 1.1 Name: pyasn1-modules -Version: 0.2.8 +Version: 0.1.5 Summary: A collection of ASN.1-based protocols modules. Home-page: https://github.com/etingof/pyasn1-modules -Author: Ilya Etingof +Author: Ilya Etingof Author-email: etingof@gmail.com -Maintainer: Ilya Etingof -License: BSD-2-Clause +License: BSD +Description: A collection of ASN.1 modules expressed in form of pyasn1 classes. Includes protocols PDUs definition (SNMP, LDAP etc.) and various data structures (X.509, PKCS etc.). 
Platform: any Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Console @@ -29,14 +29,7 @@ Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 Classifier: Topic :: Communications Classifier: Topic :: System :: Monitoring Classifier: Topic :: System :: Networking :: Monitoring Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Dist: pyasn1 (<0.5.0,>=0.4.6) - -A collection of ASN.1 modules expressed in form of pyasn1 classes. Includes protocols PDUs definition (SNMP, LDAP etc.) and various data structures (X.509, PKCS etc.). - - diff --git a/third_party/python/pyasn1-modules/README.md b/third_party/python/pyasn1-modules/README.md new file mode 100644 index 000000000000..7fd77d6d1c1e --- /dev/null +++ b/third_party/python/pyasn1-modules/README.md @@ -0,0 +1,30 @@ + +ASN.1 modules for Python +------------------------ +[![PyPI](https://img.shields.io/pypi/v/pyasn1-modules.svg?maxAge=2592000)](https://pypi.python.org/pypi/pyasn1-modules) +[![Python Versions](https://img.shields.io/pypi/pyversions/pyasn1-modules.svg)](https://pypi.python.org/pypi/pyasn1-modules/) +[![Build status](https://travis-ci.org/etingof/pyasn1-modules.svg?branch=master)](https://secure.travis-ci.org/etingof/pyasn1-modules) +[![Coverage Status](https://img.shields.io/codecov/c/github/etingof/pyasn1-modules.svg)](https://codecov.io/github/etingof/pyasn1-modules/) +[![GitHub license](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/etingof/pyasn1-modules/master/LICENSE.txt) + +This is a small but growing collection of +[ASN.1](https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.208-198811-W!!PDF-E&type=items) +data structures expressed in Python terms using [pyasn1](https://github.com/etingof/pyasn1) data model. + +If ASN.1 module you need is not present in this collection, try using +[Asn1ate](https://github.com/kimgr/asn1ate) tool that compiles ASN.1 documents +into pyasn1 code. + +Feedback +-------- + +If something does not work as expected, try browsing pyasn1 +[mailing list archives](https://sourceforge.net/p/pyasn1/mailman/pyasn1-users/) +or post your question +[to Stack Overflow](http://stackoverflow.com/questions/ask). +If you want to contribute ASN.1 modules you have converted into pyasn1, +please send me a pull request. + +Copyright (c) 2005-2017, [Ilya Etingof](mailto:etingof@gmail.com). +All rights reserved. + diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO new file mode 100644 index 000000000000..5b88f044088b --- /dev/null +++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO @@ -0,0 +1,35 @@ +Metadata-Version: 1.1 +Name: pyasn1-modules +Version: 0.1.5 +Summary: A collection of ASN.1-based protocols modules. +Home-page: https://github.com/etingof/pyasn1-modules +Author: Ilya Etingof +Author-email: etingof@gmail.com +License: BSD +Description: A collection of ASN.1 modules expressed in form of pyasn1 classes. Includes protocols PDUs definition (SNMP, LDAP etc.) and various data structures (X.509, PKCS etc.). 
+Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: Intended Audience :: Telecommunications Industry +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.4 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Communications +Classifier: Topic :: System :: Monitoring +Classifier: Topic :: System :: Networking :: Monitoring +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt new file mode 100644 index 000000000000..15c3110f09ef --- /dev/null +++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt @@ -0,0 +1,66 @@ +CHANGES.txt +LICENSE.txt +MANIFEST.in +README.md +requirements.txt +setup.cfg +setup.py +pyasn1_modules/__init__.py +pyasn1_modules/pem.py +pyasn1_modules/rfc1155.py +pyasn1_modules/rfc1157.py +pyasn1_modules/rfc1901.py +pyasn1_modules/rfc1902.py +pyasn1_modules/rfc1905.py +pyasn1_modules/rfc2251.py +pyasn1_modules/rfc2314.py +pyasn1_modules/rfc2315.py +pyasn1_modules/rfc2437.py +pyasn1_modules/rfc2459.py +pyasn1_modules/rfc2511.py +pyasn1_modules/rfc2560.py +pyasn1_modules/rfc3279.py +pyasn1_modules/rfc3280.py +pyasn1_modules/rfc3281.py +pyasn1_modules/rfc3412.py +pyasn1_modules/rfc3414.py +pyasn1_modules/rfc3447.py +pyasn1_modules/rfc3852.py +pyasn1_modules/rfc4210.py +pyasn1_modules/rfc4211.py +pyasn1_modules/rfc5208.py +pyasn1_modules/rfc5280.py +pyasn1_modules/rfc5652.py +pyasn1_modules/rfc6402.py +pyasn1_modules.egg-info/PKG-INFO +pyasn1_modules.egg-info/SOURCES.txt +pyasn1_modules.egg-info/dependency_links.txt +pyasn1_modules.egg-info/requires.txt +pyasn1_modules.egg-info/top_level.txt +pyasn1_modules.egg-info/zip-safe +tests/__init__.py +tests/__main__.py +tests/test_rfc2314.py +tests/test_rfc2315.py +tests/test_rfc2437.py +tests/test_rfc2459.py +tests/test_rfc2511.py +tests/test_rfc2560.py +tests/test_rfc4210.py +tests/test_rfc5208.py +tests/test_rfc5280.py +tests/test_rfc5652.py +tools/cmcdump.py +tools/cmpdump.py +tools/crldump.py +tools/crmfdump.py +tools/ocspclient.py +tools/ocspreqdump.py +tools/ocsprspdump.py +tools/pkcs10dump.py +tools/pkcs1dump.py +tools/pkcs7dump.py +tools/pkcs8dump.py +tools/snmpget.py +tools/x509dump-rfc5280.py +tools/x509dump.py \ No newline at end of file diff --git a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/zip-safe b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/dependency_links.txt similarity index 100% rename from third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/zip-safe rename to 
third_party/python/pyasn1-modules/pyasn1_modules.egg-info/dependency_links.txt diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt new file mode 100644 index 000000000000..01d237c82f19 --- /dev/null +++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt @@ -0,0 +1 @@ +pyasn1>=0.3.4,<0.4.0 diff --git a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/top_level.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/top_level.txt similarity index 100% rename from third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/top_level.txt rename to third_party/python/pyasn1-modules/pyasn1_modules.egg-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/zip-safe b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/zip-safe similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/zip-safe rename to third_party/python/pyasn1-modules/pyasn1_modules.egg-info/zip-safe diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/__init__.py b/third_party/python/pyasn1-modules/pyasn1_modules/__init__.py similarity index 66% rename from third_party/python/pyasn1_modules/pyasn1_modules/__init__.py rename to third_party/python/pyasn1-modules/pyasn1_modules/__init__.py index 917ac12b1bd5..ba164a2c953f 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/__init__.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/__init__.py @@ -1,2 +1,2 @@ # http://www.python.org/dev/peps/pep-0396/ -__version__ = '0.2.8' +__version__ = '0.1.5' diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/pem.py b/third_party/python/pyasn1-modules/pyasn1_modules/pem.py similarity index 94% rename from third_party/python/pyasn1_modules/pyasn1_modules/pem.py rename to third_party/python/pyasn1-modules/pyasn1_modules/pem.py index a6090bdd2102..9f16308a1c4b 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/pem.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/pem.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # import base64 import sys diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1155.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py similarity index 91% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc1155.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py index 611e97eb7425..4980a38edbc9 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1155.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. 
# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv1 message syntax # @@ -12,10 +12,7 @@ # Sample captures from: # http://wiki.wireshark.org/SampleCaptures/ # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ +from pyasn1.type import univ, namedtype, tag, constraint class ObjectName(univ.ObjectIdentifier): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1157.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py similarity index 93% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc1157.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py index b80d926a266a..1ad1d271a9e6 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1157.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv1 message syntax # @@ -12,11 +12,7 @@ # Sample captures from: # http://wiki.wireshark.org/SampleCaptures/ # -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - +from pyasn1.type import univ, namedtype, namedval, tag from pyasn1_modules import rfc1155 diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1901.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py similarity index 67% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc1901.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py index 04533da0da00..eadf9aa395b0 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1901.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py @@ -1,17 +1,15 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv2c message syntax # # ASN.1 source from: # http://www.ietf.org/rfc/rfc1901.txt # -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import univ +from pyasn1.type import univ, namedtype, namedval class Message(univ.Sequence): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1902.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py similarity index 93% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc1902.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py index d1a1648978c8..5e9307e528cc 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1902.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py @@ -1,18 +1,15 @@ # # This file is part of pyasn1-modules software. 
# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv2c message syntax # # ASN.1 source from: # http://www.ietf.org/rfc/rfc1902.txt # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ +from pyasn1.type import univ, namedtype, tag, constraint class Integer(univ.Integer): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1905.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py similarity index 92% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc1905.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py index 72c44ed4366d..de5bb031d4f2 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc1905.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py @@ -1,20 +1,15 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv2c PDU syntax # # ASN.1 source from: # http://www.ietf.org/rfc/rfc1905.txt # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - +from pyasn1.type import univ, namedtype, namedval, tag, constraint from pyasn1_modules import rfc1902 max_bindings = rfc1902.Integer(2147483647) @@ -42,7 +37,7 @@ class VarBind(univ.Sequence): class VarBindList(univ.SequenceOf): componentType = VarBind() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint( + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint( 0, max_bindings ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2251.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py similarity index 98% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2251.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py index 84c3d87c2319..94ba5891e86c 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2251.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # LDAP message syntax # @@ -12,11 +12,7 @@ # Sample captures from: # http://wiki.wireshark.org/SampleCaptures/ # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ +from pyasn1.type import tag, namedtype, namedval, univ, constraint maxInt = univ.Integer(2147483647) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2314.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py similarity index 91% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2314.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py index a45321768056..ef6a65bbf88d 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2314.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. 
# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # PKCS#10 syntax # diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2315.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py similarity index 87% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2315.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py index a98c9a9e1f04..cf732b05504e 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2315.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # PKCS#7 message syntax # @@ -25,8 +25,7 @@ class Attribute(univ.Sequence): class AttributeValueAssertion(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.NamedType('attributeType', AttributeType()), - namedtype.NamedType('attributeValue', AttributeValue(), - openType=opentype.OpenType('type', certificateAttributesMap)) + namedtype.NamedType('attributeValue', AttributeValue()) ) @@ -51,19 +50,12 @@ class EncryptedContent(univ.OctetString): pass -contentTypeMap = {} - - class EncryptedContentInfo(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.NamedType('contentType', ContentType()), namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()), - namedtype.OptionalNamedType( - 'encryptedContent', EncryptedContent().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) - ), - openType=opentype.OpenType('contentType', contentTypeMap) - ) + namedtype.OptionalNamedType('encryptedContent', EncryptedContent().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) ) @@ -93,11 +85,8 @@ class Digest(univ.OctetString): class ContentInfo(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.NamedType('contentType', ContentType()), - namedtype.OptionalNamedType( - 'content', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)), - openType=opentype.OpenType('contentType', contentTypeMap) - ) + namedtype.OptionalNamedType('content', univ.Any().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) ) @@ -269,26 +258,15 @@ class DigestInfo(univ.Sequence): class SignedData(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.NamedType('version', Version()), - namedtype.OptionalNamedType('digestAlgorithms', DigestAlgorithmIdentifiers()), + namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()), namedtype.NamedType('contentInfo', ContentInfo()), namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype( implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), - namedtype.OptionalNamedType('signerInfos', SignerInfos()) + namedtype.NamedType('signerInfos', SignerInfos()) ) class Data(univ.OctetString): pass - -_contentTypeMapUpdate = { - data: Data(), - signedData: SignedData(), - envelopedData: EnvelopedData(), - signedAndEnvelopedData: SignedAndEnvelopedData(), - digestedData: DigestedData(), - 
encryptedData: EncryptedData() -} - -contentTypeMap.update(_contentTypeMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2437.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py similarity index 92% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2437.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py index 1139eb4bccb8..678d92d5a77a 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2437.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # PKCS#1 syntax # @@ -11,10 +11,7 @@ # # Sample captures could be obtained with "openssl genrsa" command # -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - +from pyasn1.type import tag, namedtype, univ from pyasn1_modules.rfc2459 import AlgorithmIdentifier pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1') diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2459.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py similarity index 95% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2459.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py index 9f3578797aa3..c988c4f2c356 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2459.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py @@ -1,11 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Updated by Russ Housley to resolve the TODO regarding the Certificate -# Policies Certificate Extension. 
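Looking back at the rfc2315.py hunks above (the rfc2459.py header resumes below): with the `contentTypeMap` open types removed, `ContentInfo.content` comes back as an opaque ANY, so callers dispatch on `contentType` and decode the payload themselves. A rough sketch under that assumption; the input path is illustrative, not part of the vendored code:

```python
from pyasn1.codec.der import decoder
from pyasn1_modules import rfc2315

# Illustrative path; any DER-encoded PKCS#7 blob will do.
with open('message.p7', 'rb') as handle:
    p7_der = handle.read()

content_info, _ = decoder.decode(p7_der, asn1Spec=rfc2315.ContentInfo())

if content_info['contentType'] == rfc2315.signedData:
    # Without the open-type map, 'content' is raw DER that has to be decoded
    # explicitly against the SignedData spec.
    signed_data, _ = decoder.decode(content_info['content'].asOctets(),
                                    asn1Spec=rfc2315.SignedData())
```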
-# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # X.509 message syntax # @@ -16,14 +13,7 @@ # Sample captures from: # http://wiki.wireshark.org/SampleCaptures/ # -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful +from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful MAX = float('inf') @@ -94,6 +84,26 @@ id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1') id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2') +class AttributeValue(univ.Any): + pass + + +class AttributeType(univ.ObjectIdentifier): + pass + + +class AttributeTypeAndValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('value', AttributeValue()) + ) + + +class Attribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())) + ) id_at = univ.ObjectIdentifier('2.5.4') @@ -267,6 +277,19 @@ class DSAPrivateKey(univ.Sequence): # ---- +class RelativeDistinguishedName(univ.SetOf): + componentType = AttributeTypeAndValue() + + +class RDNSequence(univ.SequenceOf): + componentType = RelativeDistinguishedName() + + +class Name(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('', RDNSequence()) + ) + class DirectoryString(univ.Choice): componentType = namedtype.NamedTypes( @@ -293,6 +316,111 @@ class AlgorithmIdentifier(univ.Sequence): ) +class Extension(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('extnID', univ.ObjectIdentifier()), + namedtype.DefaultedNamedType('critical', univ.Boolean('False')), + namedtype.NamedType('extnValue', univ.Any()) + ) + + +class Extensions(univ.SequenceOf): + componentType = Extension() + sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + + +class SubjectPublicKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', AlgorithmIdentifier()), + namedtype.NamedType('subjectPublicKey', univ.BitString()) + ) + + +class UniqueIdentifier(univ.BitString): + pass + + +class Time(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('utcTime', useful.UTCTime()), + namedtype.NamedType('generalTime', useful.GeneralizedTime()) + ) + + +class Validity(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('notBefore', Time()), + namedtype.NamedType('notAfter', Time()) + ) + + +class CertificateSerialNumber(univ.Integer): + pass + + +class Version(univ.Integer): + namedValues = namedval.NamedValues( + ('v1', 0), ('v2', 1), ('v3', 2) + ) + + +class TBSCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.DefaultedNamedType('version', Version('v1').subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('serialNumber', CertificateSerialNumber()), + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('validity', Validity()), + namedtype.NamedType('subject', Name()), + namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()), + 
namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('extensions', Extensions().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + + +class Certificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertificate', TBSCertificate()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signatureValue', univ.BitString()) + ) + + +# CRL structures + +class RevokedCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('userCertificate', CertificateSerialNumber()), + namedtype.NamedType('revocationDate', Time()), + namedtype.OptionalNamedType('crlEntryExtensions', Extensions()) + ) + + +class TBSCertList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('version', Version()), + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('thisUpdate', Time()), + namedtype.OptionalNamedType('nextUpdate', Time()), + namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())), + namedtype.OptionalNamedType('crlExtensions', Extensions().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + + +class CertificateList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertList', TBSCertList()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()) + ) + # Algorithm OIDs and parameter structures @@ -357,7 +485,7 @@ class TeletexDomainDefinedAttribute(univ.Sequence): class TeletexDomainDefinedAttributes(univ.SequenceOf): componentType = TeletexDomainDefinedAttribute() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) terminal_type = univ.Integer(23) @@ -548,7 +676,7 @@ teletex_organizational_unit_names = univ.Integer(5) class TeletexOrganizationalUnitNames(univ.SequenceOf): componentType = TeletexOrganizationalUnitName() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) teletex_personal_name = univ.Integer(4) @@ -604,7 +732,7 @@ class ExtensionAttribute(univ.Sequence): class ExtensionAttributes(univ.SetOf): componentType = ExtensionAttribute() - sizeSpec = univ.SetOf.sizeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes) + subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes) class BuiltInDomainDefinedAttribute(univ.Sequence): @@ -618,16 +746,16 @@ class BuiltInDomainDefinedAttribute(univ.Sequence): class BuiltInDomainDefinedAttributes(univ.SequenceOf): componentType = BuiltInDomainDefinedAttribute() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) class OrganizationalUnitName(char.PrintableString): - 
subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length) class OrganizationalUnitNames(univ.SequenceOf): componentType = OrganizationalUnitName() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) class PersonalName(univ.Set): @@ -774,7 +902,7 @@ id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20') class CRLNumber(univ.Integer): - subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(0, MAX) class BaseCRLNumber(CRLNumber): @@ -799,7 +927,7 @@ class KeyPurposeId(univ.ObjectIdentifier): class ExtKeyUsageSyntax(univ.SequenceOf): componentType = KeyPurposeId() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) class ReasonFlags(univ.BitString): @@ -844,6 +972,11 @@ class BasicConstraints(univ.Sequence): id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9') +class SubjectDirectoryAttributes(univ.SequenceOf): + componentType = Attribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + + class EDIPartyName(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype( @@ -853,10 +986,76 @@ class EDIPartyName(univ.Sequence): ) +class AnotherName(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type-id', univ.ObjectIdentifier()), + namedtype.NamedType('value', + univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + + +class GeneralName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('otherName', + AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('rfc822Name', + char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('dNSName', + char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.NamedType('x400Address', + ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.NamedType('directoryName', + Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), + namedtype.NamedType('ediPartyName', + EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), + namedtype.NamedType('uniformResourceIdentifier', + char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))), + namedtype.NamedType('iPAddress', univ.OctetString().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))) + ) + + +class GeneralNames(univ.SequenceOf): + componentType = GeneralName() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + + +class AccessDescription(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('accessMethod', univ.ObjectIdentifier()), + 
namedtype.NamedType('accessLocation', GeneralName()) + ) + + +class AuthorityInfoAccessSyntax(univ.SequenceOf): + componentType = AccessDescription() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + id_ce_deltaCRLIndicator = univ.ObjectIdentifier('2.5.29.27') +class DistributionPointName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('fullName', GeneralNames().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + + +class DistributionPoint(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('reasons', ReasonFlags().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) + ) + class BaseDistance(univ.Integer): subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX) @@ -865,14 +1064,56 @@ class BaseDistance(univ.Integer): id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31') +class CRLDistPointsSyntax(univ.SequenceOf): + componentType = DistributionPoint() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + + id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28') +class IssuingDistributionPoint(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) + ) + + +class GeneralSubtree(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('base', GeneralName()), + namedtype.DefaultedNamedType('minimum', BaseDistance(0).subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('maximum', BaseDistance().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + + +class GeneralSubtrees(univ.SequenceOf): + componentType = GeneralSubtree() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30') +class NameConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + + class 
DisplayText(univ.Choice): componentType = namedtype.NamedTypes( namedtype.NamedType('visibleString', @@ -928,7 +1169,7 @@ class PolicyInformation(univ.Sequence): class CertificatePolicies(univ.SequenceOf): componentType = PolicyInformation() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33') @@ -943,7 +1184,7 @@ class PolicyMapping(univ.Sequence): class PolicyMappings(univ.SequenceOf): componentType = PolicyMapping() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16') @@ -991,110 +1232,6 @@ class SubjectKeyIdentifier(KeyIdentifier): pass -id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29') - - -id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17') - - -id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18') - - -class AttributeValue(univ.Any): - pass - - -class AttributeType(univ.ObjectIdentifier): - pass - -certificateAttributesMap = {} - - -class AttributeTypeAndValue(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type', AttributeType()), - namedtype.NamedType('value', AttributeValue(), - openType=opentype.OpenType('type', certificateAttributesMap)) - ) - - -class Attribute(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type', AttributeType()), - namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())) - ) - - -class SubjectDirectoryAttributes(univ.SequenceOf): - componentType = Attribute() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) - - -class RelativeDistinguishedName(univ.SetOf): - componentType = AttributeTypeAndValue() - - -class RDNSequence(univ.SequenceOf): - componentType = RelativeDistinguishedName() - - -class Name(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('', RDNSequence()) - ) - -class CertificateSerialNumber(univ.Integer): - pass - - -class AnotherName(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type-id', univ.ObjectIdentifier()), - namedtype.NamedType('value', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) - ) - - -class GeneralName(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('otherName', - AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('rfc822Name', - char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('dNSName', - char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.NamedType('x400Address', - ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.NamedType('directoryName', - Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), - namedtype.NamedType('ediPartyName', - EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), - namedtype.NamedType('uniformResourceIdentifier', - char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))), - namedtype.NamedType('iPAddress', univ.OctetString().subtype( - implicitTag=tag.Tag(tag.tagClassContext, 
tag.tagFormatSimple, 7))), - namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))) - ) - - -class GeneralNames(univ.SequenceOf): - componentType = GeneralName() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) - - -class AccessDescription(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('accessMethod', univ.ObjectIdentifier()), - namedtype.NamedType('accessLocation', GeneralName()) - ) - - -class AuthorityInfoAccessSyntax(univ.SequenceOf): - componentType = AccessDescription() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) - - class AuthorityKeyIdentifier(univ.Sequence): componentType = namedtype.NamedTypes( namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype( @@ -1106,189 +1243,30 @@ class AuthorityKeyIdentifier(univ.Sequence): ) -class DistributionPointName(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('fullName', GeneralNames().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) - ) - - -class DistributionPoint(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('reasons', ReasonFlags().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) - ) - - -class CRLDistPointsSyntax(univ.SequenceOf): - componentType = DistributionPoint() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) - - -class IssuingDistributionPoint(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) - ) - - -class GeneralSubtree(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('base', GeneralName()), - namedtype.DefaultedNamedType('minimum', BaseDistance(0).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('maximum', BaseDistance().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) - ) - - -class GeneralSubtrees(univ.SequenceOf): - componentType = GeneralSubtree() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) - - -class NameConstraints(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('permittedSubtrees', 
GeneralSubtrees().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) - ) +id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29') class CertificateIssuer(GeneralNames): pass +id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17') + + class SubjectAltName(GeneralNames): pass +id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18') + + class IssuerAltName(GeneralNames): pass -certificateExtensionsMap = {} - - -class Extension(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('extnID', univ.ObjectIdentifier()), - namedtype.DefaultedNamedType('critical', univ.Boolean('False')), - namedtype.NamedType('extnValue', univ.OctetString(), - openType=opentype.OpenType('extnID', certificateExtensionsMap)) - ) - - -class Extensions(univ.SequenceOf): - componentType = Extension() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) - - -class SubjectPublicKeyInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('algorithm', AlgorithmIdentifier()), - namedtype.NamedType('subjectPublicKey', univ.BitString()) - ) - - -class UniqueIdentifier(univ.BitString): - pass - - -class Time(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('utcTime', useful.UTCTime()), - namedtype.NamedType('generalTime', useful.GeneralizedTime()) - ) - - -class Validity(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('notBefore', Time()), - namedtype.NamedType('notAfter', Time()) - ) - - -class Version(univ.Integer): - namedValues = namedval.NamedValues( - ('v1', 0), ('v2', 1), ('v3', 2) - ) - - -class TBSCertificate(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', Version('v1').subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('serialNumber', CertificateSerialNumber()), - namedtype.NamedType('signature', AlgorithmIdentifier()), - namedtype.NamedType('issuer', Name()), - namedtype.NamedType('validity', Validity()), - namedtype.NamedType('subject', Name()), - namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()), - namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.OptionalNamedType('extensions', Extensions().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) - ) - - -class Certificate(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsCertificate', TBSCertificate()), - namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('signatureValue', univ.BitString()) - ) - -# CRL structures - -class RevokedCertificate(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('userCertificate', CertificateSerialNumber()), - namedtype.NamedType('revocationDate', Time()), - namedtype.OptionalNamedType('crlEntryExtensions', Extensions()) - ) - - -class TBSCertList(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('version', Version()), - namedtype.NamedType('signature', AlgorithmIdentifier()), - 
namedtype.NamedType('issuer', Name()), - namedtype.NamedType('thisUpdate', Time()), - namedtype.OptionalNamedType('nextUpdate', Time()), - namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())), - namedtype.OptionalNamedType('crlExtensions', Extensions().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) - ) - - -class CertificateList(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsCertList', TBSCertList()), - namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()) - ) - # map of AttributeType -> AttributeValue -_certificateAttributesMapUpdate = { +certificateAttributesMap = { id_at_name: X520name(), id_at_surname: X520name(), id_at_givenName: X520name(), @@ -1305,17 +1283,14 @@ _certificateAttributesMapUpdate = { emailAddress: Pkcs9email(), } -certificateAttributesMap.update(_certificateAttributesMapUpdate) - - # map of Certificate Extension OIDs to Extensions -_certificateExtensionsMapUpdate = { +certificateExtensionsMap = { id_ce_authorityKeyIdentifier: AuthorityKeyIdentifier(), id_ce_subjectKeyIdentifier: SubjectKeyIdentifier(), id_ce_keyUsage: KeyUsage(), id_ce_privateKeyUsagePeriod: PrivateKeyUsagePeriod(), - id_ce_certificatePolicies: CertificatePolicies(), + id_ce_certificatePolicies: PolicyInformation(), # could be a sequence of concat'ed objects? id_ce_policyMappings: PolicyMappings(), id_ce_subjectAltName: SubjectAltName(), id_ce_issuerAltName: IssuerAltName(), @@ -1334,6 +1309,3 @@ _certificateExtensionsMapUpdate = { id_ce_invalidityDate: useful.GeneralizedTime(), id_ce_certificateIssuer: GeneralNames(), } - -certificateExtensionsMap.update(_certificateExtensionsMapUpdate) - diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2511.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py similarity index 96% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2511.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py index 5dd6fc224a4d..4ae7db55022e 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2511.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. 
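Stepping back from the rfc2459.py hunks above (the rfc2511.py header continues below): the module is normally consumed through pyasn1's DER decoder, and that usage is unchanged by this revert. A minimal sketch, assuming `cert.der` is a DER-encoded X.509 certificate on disk (the path is illustrative):

```python
from pyasn1.codec.der import decoder
from pyasn1_modules import rfc2459

# Illustrative path; any DER-encoded certificate will do.
with open('cert.der', 'rb') as handle:
    substrate = handle.read()

certificate, rest = decoder.decode(substrate, asn1Spec=rfc2459.Certificate())
tbs = certificate['tbsCertificate']
print(tbs['serialNumber'])           # univ.Integer
print(tbs['subject'].prettyPrint())  # Name rendered as a readable tree
```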
# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # X.509 certificate Request Message Format (CRMF) syntax # @@ -11,8 +11,8 @@ # # Sample captures could be obtained with OpenSSL # -from pyasn1_modules import rfc2315 from pyasn1_modules.rfc2459 import * +from pyasn1_modules import rfc2315 MAX = float('inf') @@ -109,7 +109,7 @@ class PKIPublicationInfo(univ.Sequence): namedtype.NamedType('action', univ.Integer(namedValues=namedval.NamedValues(('dontPublish', 0), ('pleasePublish', 1)))), namedtype.OptionalNamedType('pubInfos', univ.SequenceOf(componentType=SinglePubInfo()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))) + subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) ) @@ -195,7 +195,7 @@ class ProofOfPossession(univ.Choice): class Controls(univ.SequenceOf): componentType = AttributeTypeAndValue() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) class OptionalValidity(univ.Sequence): @@ -249,10 +249,10 @@ class CertReqMsg(univ.Sequence): namedtype.NamedType('certReq', CertRequest()), namedtype.OptionalNamedType('pop', ProofOfPossession()), namedtype.OptionalNamedType('regInfo', univ.SequenceOf(componentType=AttributeTypeAndValue()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))) + subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) ) class CertReqMessages(univ.SequenceOf): componentType = CertReqMsg() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2560.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py similarity index 96% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc2560.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py index c37e25b65e50..472099e2dfd3 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2560.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # OCSP request/response syntax # @@ -21,12 +21,7 @@ # * dates are left as strings in GeneralizedTime format -- datetime.datetime # would be nicer # -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - +from pyasn1.type import tag, namedtype, namedval, univ, useful from pyasn1_modules import rfc2459 diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3279.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py similarity index 85% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3279.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py index f6e24deafc3e..f69ff085e651 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3279.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py @@ -2,20 +2,11 @@ # This file is part of pyasn1-modules. 
# # Copyright (c) 2017, Danielle Madeley -# License: http://snmplabs.com/pyasn1/license.html +# License: http://pyasn1.sf.net/license.html # -# Modified by Russ Housley to add maps for use with opentypes. +# Derived from RFC 3279 # -# Algorithms and Identifiers for Internet X.509 Certificates and CRLs -# -# Derived from RFC 3279: -# https://www.rfc-editor.org/rfc/rfc3279.txt -# -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 +from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful def _OID(*components): @@ -238,23 +229,3 @@ prime239v1 = _OID(primeCurve, 4) prime239v2 = _OID(primeCurve, 5) prime239v3 = _OID(primeCurve, 6) prime256v1 = _OID(primeCurve, 7) - - -# Map of Algorithm Identifier OIDs to Parameters added to the -# ones in rfc5280.py. Do not add OIDs with absent paramaters. - -_algorithmIdentifierMapUpdate = { - md2: univ.Null(""), - md5: univ.Null(""), - id_sha1: univ.Null(""), - id_dsa: Dss_Parms(), - rsaEncryption: univ.Null(""), - md2WithRSAEncryption: univ.Null(""), - md5WithRSAEncryption: univ.Null(""), - sha1WithRSAEncryption: univ.Null(""), - dhpublicnumber: DomainParameters(), - id_keyExchangeAlgorithm: KEA_Parms_Id(), - id_ecPublicKey: EcpkParameters(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3280.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py similarity index 96% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3280.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py index e9dbc8684755..3614e6ce911e 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3280.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py @@ -3,8 +3,8 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. 
-# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Internet X.509 Public Key Infrastructure Certificate and Certificate # Revocation List (CRL) Profile @@ -12,13 +12,7 @@ # ASN.1 source from: # http://www.ietf.org/rfc/rfc3280.txt # -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful +from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful MAX = float('inf') @@ -53,7 +47,7 @@ class OrganizationalUnitNames(univ.SequenceOf): OrganizationalUnitNames.componentType = OrganizationalUnitName() -OrganizationalUnitNames.sizeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) +OrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) class AttributeType(univ.ObjectIdentifier): @@ -152,7 +146,7 @@ class Extensions(univ.SequenceOf): Extensions.componentType = Extension() -Extensions.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +Extensions.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class CertificateSerialNumber(univ.Integer): @@ -219,7 +213,7 @@ class RelativeDistinguishedName(univ.SetOf): RelativeDistinguishedName.componentType = AttributeTypeAndValue() -RelativeDistinguishedName.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +RelativeDistinguishedName.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class RDNSequence(univ.SequenceOf): @@ -519,7 +513,7 @@ class BuiltInDomainDefinedAttributes(univ.SequenceOf): BuiltInDomainDefinedAttributes.componentType = BuiltInDomainDefinedAttribute() -BuiltInDomainDefinedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) +BuiltInDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) ub_extension_attributes = univ.Integer(256) @@ -542,7 +536,7 @@ class ExtensionAttributes(univ.SetOf): ExtensionAttributes.componentType = ExtensionAttribute() -ExtensionAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes) +ExtensionAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes) class ORAddress(univ.Sequence): @@ -706,7 +700,7 @@ class TeletexDomainDefinedAttributes(univ.SequenceOf): TeletexDomainDefinedAttributes.componentType = TeletexDomainDefinedAttribute() -TeletexDomainDefinedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) +TeletexDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) class TBSCertList(univ.Sequence): @@ -905,7 +899,7 @@ class TeletexOrganizationalUnitNames(univ.SequenceOf): TeletexOrganizationalUnitNames.componentType = TeletexOrganizationalUnitName() -TeletexOrganizationalUnitNames.sizeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) +TeletexOrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) physical_delivery_office_name = univ.Integer(10) @@ -1077,7 +1071,7 @@ class GeneralNames(univ.SequenceOf): GeneralNames.componentType = GeneralName() -GeneralNames.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +GeneralNames.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class IssuerAltName(GeneralNames): @@ -1100,7 +1094,7 @@ PolicyMappings.componentType = 
univ.Sequence(componentType=namedtype.NamedTypes( namedtype.NamedType('subjectDomainPolicy', CertPolicyId()) )) -PolicyMappings.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +PolicyMappings.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class PolicyQualifierId(univ.ObjectIdentifier): @@ -1119,7 +1113,7 @@ class SubjectDirectoryAttributes(univ.SequenceOf): SubjectDirectoryAttributes.componentType = Attribute() -SubjectDirectoryAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +SubjectDirectoryAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) anyPolicy = _OID(id_ce_certificatePolicies, 0) @@ -1198,7 +1192,7 @@ class CertificatePolicies(univ.SequenceOf): CertificatePolicies.componentType = PolicyInformation() -CertificatePolicies.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +CertificatePolicies.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_ce_basicConstraints = _OID(id_ce, 19) @@ -1216,7 +1210,7 @@ class ExtKeyUsageSyntax(univ.SequenceOf): ExtKeyUsageSyntax.componentType = KeyPurposeId() -ExtKeyUsageSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +ExtKeyUsageSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class SubjectAltName(GeneralNames): @@ -1287,7 +1281,7 @@ class CRLDistributionPoints(univ.SequenceOf): CRLDistributionPoints.componentType = DistributionPoint() -CRLDistributionPoints.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +CRLDistributionPoints.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class FreshestCRL(CRLDistributionPoints): @@ -1340,7 +1334,7 @@ class GeneralSubtrees(univ.SequenceOf): GeneralSubtrees.componentType = GeneralSubtree() -GeneralSubtrees.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +GeneralSubtrees.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class NameConstraints(univ.Sequence): @@ -1389,7 +1383,7 @@ class AuthorityInfoAccessSyntax(univ.SequenceOf): AuthorityInfoAccessSyntax.componentType = AccessDescription() -AuthorityInfoAccessSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +AuthorityInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_ce_issuingDistributionPoint = _OID(id_ce, 28) @@ -1458,7 +1452,7 @@ class SubjectInfoAccessSyntax(univ.SequenceOf): SubjectInfoAccessSyntax.componentType = AccessDescription() -SubjectInfoAccessSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +SubjectInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class KeyUsage(univ.BitString): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3281.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py similarity index 98% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3281.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py index 39ce82427cfd..8aa99d39fb5e 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3281.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py @@ -3,20 +3,20 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. 
-# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # An Internet Attribute Certificate Profile for Authorization # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3281.txt # +from pyasn1.type import univ from pyasn1.type import char -from pyasn1.type import constraint from pyasn1.type import namedtype from pyasn1.type import namedval from pyasn1.type import tag -from pyasn1.type import univ +from pyasn1.type import constraint from pyasn1.type import useful from pyasn1_modules import rfc3280 diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3412.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py similarity index 89% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3412.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py index 59f84959d08a..b3f5a929c83d 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3412.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py @@ -1,18 +1,15 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv3 message syntax # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3412.txt # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - +from pyasn1.type import univ, namedtype, constraint from pyasn1_modules import rfc1905 diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3414.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py similarity index 82% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3414.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py index b9087cb579cb..aeb82aa26b1f 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3414.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py @@ -1,17 +1,15 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # SNMPv3 message syntax # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3414.txt # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ +from pyasn1.type import univ, namedtype, constraint class UsmSecurityParameters(univ.Sequence): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3447.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py similarity index 83% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3447.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py index c3621a0c2573..57c99faa4a73 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3447.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. 
# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # PKCS#1 syntax # @@ -11,9 +11,7 @@ # # Sample captures could be obtained with "openssl genrsa" command # -from pyasn1.type import constraint -from pyasn1.type import namedval - +from pyasn1.type import constraint, namedval from pyasn1_modules.rfc2437 import * @@ -27,7 +25,7 @@ class OtherPrimeInfo(univ.Sequence): class OtherPrimeInfos(univ.SequenceOf): componentType = OtherPrimeInfo() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) class RSAPrivateKey(univ.Sequence): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3852.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py similarity index 96% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc3852.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py index d294c5b722e4..872eb88cecf0 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3852.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py @@ -3,20 +3,15 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Cryptographic Message Syntax (CMS) # # ASN.1 source from: # http://www.ietf.org/rfc/rfc3852.txt # -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful +from pyasn1.type import univ, namedtype, namedval, tag, constraint, useful from pyasn1_modules import rfc3280 from pyasn1_modules import rfc3281 @@ -54,7 +49,7 @@ class SignedAttributes(univ.SetOf): SignedAttributes.componentType = Attribute() -SignedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +SignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class OtherRevocationInfoFormat(univ.Sequence): @@ -309,7 +304,7 @@ class RecipientInfos(univ.SetOf): RecipientInfos.componentType = RecipientInfo() -RecipientInfos.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +RecipientInfos.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class DigestAlgorithmIdentifier(rfc3280.AlgorithmIdentifier): @@ -336,7 +331,7 @@ class UnprotectedAttributes(univ.SetOf): UnprotectedAttributes.componentType = Attribute() -UnprotectedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +UnprotectedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class ContentType(univ.ObjectIdentifier): @@ -430,7 +425,7 @@ class UnauthAttributes(univ.SetOf): UnauthAttributes.componentType = Attribute() -UnauthAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +UnauthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class ExtendedCertificateInfo(univ.Sequence): @@ -550,7 +545,7 @@ class UnsignedAttributes(univ.SetOf): UnsignedAttributes.componentType = Attribute() -UnsignedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +UnsignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class SignatureValue(univ.OctetString): @@ -632,7 +627,7 @@ class AuthAttributes(univ.SetOf): AuthAttributes.componentType = Attribute() 
-AuthAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +AuthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class AuthenticatedData(univ.Sequence): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4210.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py similarity index 94% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc4210.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py index 4d01a337dd81..d7e6db09bed3 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4210.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py @@ -1,24 +1,15 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Certificate Management Protocol structures as per RFC4210 # # Based on Alex Railean's work # -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc2314 -from pyasn1_modules import rfc2459 -from pyasn1_modules import rfc2511 +from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful +from pyasn1_modules import rfc2459, rfc2511, rfc2314 MAX = float('inf') @@ -44,7 +35,7 @@ class PKIFreeText(univ.SequenceOf): PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String """ componentType = char.UTF8String() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) class PollRepContent(univ.SequenceOf): @@ -354,21 +345,16 @@ class RevRepContent(univ.Sequence): OPTIONAL """ componentType = namedtype.NamedTypes( - namedtype.NamedType( - 'status', univ.SequenceOf( - componentType=PKIStatusInfo(), - sizeSpec=constraint.ValueSizeConstraint(1, MAX) - ) - ), + namedtype.NamedType('status', PKIStatusInfo()), namedtype.OptionalNamedType( 'revCerts', univ.SequenceOf(componentType=rfc2511.CertId()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX), + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) ) ), namedtype.OptionalNamedType( 'crls', univ.SequenceOf(componentType=rfc2459.CertificateList()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX), + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) ) ) @@ -396,12 +382,12 @@ class KeyRecRepContent(univ.Sequence): namedtype.OptionalNamedType( 'caCerts', univ.SequenceOf(componentType=CMPCertificate()).subtype( explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), - sizeSpec=constraint.ValueSizeConstraint(1, MAX) + subtypeSpec=constraint.ValueSizeConstraint(1, MAX) ) ), namedtype.OptionalNamedType('keyPairHist', univ.SequenceOf(componentType=CertifiedKeyPair()).subtype( explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2), - sizeSpec=constraint.ValueSizeConstraint(1, MAX)) + subtypeSpec=constraint.ValueSizeConstraint(1, MAX)) ) ) @@ -435,8 +421,7 @@ class CertRepMessage(univ.Sequence): namedtype.OptionalNamedType( 'caPubs', univ.SequenceOf( componentType=CMPCertificate() - ).subtype(sizeSpec=constraint.ValueSizeConstraint(1, MAX), - 
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)) + ).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)) ), namedtype.NamedType('response', univ.SequenceOf(componentType=CertResponse())) ) @@ -743,11 +728,11 @@ class PKIHeader(univ.Sequence): namedtype.OptionalNamedType('generalInfo', univ.SequenceOf( componentType=InfoTypeAndValue().subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX) + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8) ) - ).subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)) - ) + ) + ) ) @@ -782,7 +767,7 @@ class PKIMessage(univ.Sequence): univ.SequenceOf( componentType=CMPCertificate() ).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX), + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) ) ) @@ -794,7 +779,7 @@ class PKIMessages(univ.SequenceOf): PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage """ componentType = PKIMessage() - sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) # pyasn1 does not naturally handle recursive definitions, thus this hack: diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4211.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py similarity index 96% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc4211.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py index 9783058e2ca3..d20da7872a90 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4211.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py @@ -3,8 +3,8 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. 
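The rfc4210.py hunks above move the same SIZE bounds into `.subtype()` calls; for reference, the reverted idiom for an explicitly tagged, size-bounded field looks roughly like this (the name `ca_pubs_spec` simply mirrors the `caPubs` field shown above):

```python
from pyasn1.type import constraint, tag, univ
from pyasn1_modules import rfc4210

MAX = float('inf')

# SEQUENCE SIZE (1..MAX) OF CMPCertificate, wrapped in an explicit [1] tag,
# with the bound expressed via subtypeSpec as in the reverted module.
ca_pubs_spec = univ.SequenceOf(componentType=rfc4210.CMPCertificate()).subtype(
    subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
    explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1),
)
```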
-# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Internet X.509 Public Key Infrastructure Certificate Request # Message Format (CRMF) @@ -12,12 +12,7 @@ # ASN.1 source from: # http://www.ietf.org/rfc/rfc4211.txt # -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ +from pyasn1.type import univ, char, namedtype, namedval, tag, constraint from pyasn1_modules import rfc3280 from pyasn1_modules import rfc3852 @@ -282,7 +277,7 @@ class Controls(univ.SequenceOf): Controls.componentType = AttributeTypeAndValue() -Controls.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +Controls.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class CertRequest(univ.Sequence): @@ -312,7 +307,7 @@ class CertReqMessages(univ.SequenceOf): CertReqMessages.componentType = CertReqMsg() -CertReqMessages.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +CertReqMessages.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class CertReq(CertRequest): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5208.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py similarity index 92% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc5208.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py index 14082a89bd7f..6b6487d83afc 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5208.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1-modules software. # -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # PKCS#8 syntax # @@ -11,8 +11,8 @@ # # Sample captures could be obtained with "openssl pkcs8 -topk8" command # -from pyasn1_modules import rfc2251 from pyasn1_modules.rfc2459 import * +from pyasn1_modules import rfc2251 class KeyEncryptionAlgorithms(AlgorithmIdentifier): diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5280.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py similarity index 89% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc5280.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py index f2b52b25c213..7d3aa695aac5 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5280.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py @@ -3,30 +3,25 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. -# Updated by Russ Housley for ORAddress Extension Attribute opentype support. -# Updated by Russ Housley for AlgorithmIdentifier opentype support. 
-# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Internet X.509 Public Key Infrastructure Certificate and Certificate # Revocation List (CRL) Profile # # ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5280.txt +# http://www.ietf.org/rfc/rfc5280.txt # +from pyasn1.type import univ from pyasn1.type import char -from pyasn1.type import constraint from pyasn1.type import namedtype from pyasn1.type import namedval -from pyasn1.type import opentype from pyasn1.type import tag -from pyasn1.type import univ +from pyasn1.type import constraint from pyasn1.type import useful MAX = float('inf') - def _buildOid(*components): output = [] for x in tuple(components): @@ -75,7 +70,7 @@ class Extensions(univ.SequenceOf): Extensions.componentType = Extension() -Extensions.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +Extensions.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) physical_delivery_personal_name = univ.Integer(13) @@ -206,7 +201,7 @@ class TeletexDomainDefinedAttributes(univ.SequenceOf): TeletexDomainDefinedAttributes.componentType = TeletexDomainDefinedAttribute() -TeletexDomainDefinedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) +TeletexDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) extended_network_address = univ.Integer(22) @@ -283,16 +278,14 @@ class CertificateSerialNumber(univ.Integer): pass -algorithmIdentifierMap = {} - - class AlgorithmIdentifier(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('algorithm', univ.ObjectIdentifier()), - namedtype.OptionalNamedType('parameters', univ.Any(), - openType=opentype.OpenType('algorithm', algorithmIdentifierMap) - ) - ) + pass + + +AlgorithmIdentifier.componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', univ.ObjectIdentifier()), + namedtype.OptionalNamedType('parameters', univ.Any()) +) class Time(univ.Choice): @@ -309,17 +302,14 @@ class AttributeValue(univ.Any): pass -certificateAttributesMap = {} - - class AttributeTypeAndValue(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type', AttributeType()), - namedtype.NamedType( - 'value', AttributeValue(), - openType=opentype.OpenType('type', certificateAttributesMap) - ) - ) + pass + + +AttributeTypeAndValue.componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('value', AttributeValue()) +) class RelativeDistinguishedName(univ.SetOf): @@ -327,7 +317,7 @@ class RelativeDistinguishedName(univ.SetOf): RelativeDistinguishedName.componentType = AttributeTypeAndValue() -RelativeDistinguishedName.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +RelativeDistinguishedName.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class RDNSequence(univ.SequenceOf): @@ -389,23 +379,18 @@ class PhysicalDeliveryOfficeName(PDSParameter): ub_extension_attributes = univ.Integer(256) -certificateExtensionsMap = { -} - -oraddressExtensionAttributeMap = { -} - class ExtensionAttribute(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType( - 'extension-attribute-type', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, ub_extension_attributes)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType( - 'extension-attribute-value', - 
univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)), - openType=opentype.OpenType('extension-attribute-type', oraddressExtensionAttributeMap)) - ) + pass + + +ExtensionAttribute.componentType = namedtype.NamedTypes( + namedtype.NamedType('extension-attribute-type', univ.Integer().subtype( + subtypeSpec=constraint.ValueRangeConstraint(0, ub_extension_attributes)).subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('extension-attribute-value', + univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) +) id_qt = _buildOid(id_pkix, 2) @@ -649,7 +634,7 @@ class ExtensionAttributes(univ.SetOf): ExtensionAttributes.componentType = ExtensionAttribute() -ExtensionAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes) +ExtensionAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes) ub_emailaddress_length = univ.Integer(255) @@ -752,12 +737,13 @@ X520SerialNumber.subtypeSpec = constraint.ValueSizeConstraint(1, ub_serial_numbe class Attribute(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type', AttributeType()), - namedtype.NamedType('values', - univ.SetOf(componentType=AttributeValue()), - openType=opentype.OpenType('type', certificateAttributesMap)) - ) + pass + + +Attribute.componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue())) +) ub_common_name = univ.Integer(64) @@ -796,7 +782,7 @@ class BuiltInDomainDefinedAttributes(univ.SequenceOf): BuiltInDomainDefinedAttributes.componentType = BuiltInDomainDefinedAttribute() -BuiltInDomainDefinedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) +BuiltInDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) id_at_pseudonym = _buildOid(id_at, 65) @@ -877,7 +863,7 @@ class OrganizationalUnitNames(univ.SequenceOf): OrganizationalUnitNames.componentType = OrganizationalUnitName() -OrganizationalUnitNames.sizeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) +OrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) class PrivateDomainName(univ.Choice): @@ -1036,7 +1022,7 @@ class TeletexOrganizationalUnitNames(univ.SequenceOf): TeletexOrganizationalUnitNames.componentType = TeletexOrganizationalUnitName() -TeletexOrganizationalUnitNames.sizeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) +TeletexOrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units) id_ce = _buildOid(2, 5, 29) @@ -1080,20 +1066,14 @@ PrivateKeyUsagePeriod.componentType = namedtype.NamedTypes( ) -anotherNameMap = { - -} - - class AnotherName(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type-id', univ.ObjectIdentifier()), - namedtype.NamedType( - 'value', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)), - openType=opentype.OpenType('type-id', anotherNameMap) - ) - ) + pass + + +AnotherName.componentType = namedtype.NamedTypes( + namedtype.NamedType('type-id', univ.ObjectIdentifier()), + namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) +) class EDIPartyName(univ.Sequence): @@ -1159,7 +1139,7 @@ class GeneralNames(univ.SequenceOf): GeneralNames.componentType = 
GeneralName() -GeneralNames.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +GeneralNames.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class DistributionPointName(univ.Choice): @@ -1259,7 +1239,7 @@ class CRLDistributionPoints(univ.SequenceOf): CRLDistributionPoints.componentType = DistributionPoint() -CRLDistributionPoints.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +CRLDistributionPoints.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class GeneralSubtrees(univ.SequenceOf): @@ -1267,7 +1247,7 @@ class GeneralSubtrees(univ.SequenceOf): GeneralSubtrees.componentType = GeneralSubtree() -GeneralSubtrees.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +GeneralSubtrees.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class NameConstraints(univ.Sequence): @@ -1287,7 +1267,7 @@ class SubjectDirectoryAttributes(univ.SequenceOf): SubjectDirectoryAttributes.componentType = Attribute() -SubjectDirectoryAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +SubjectDirectoryAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_kp_OCSPSigning = _buildOid(id_kp, 9) @@ -1331,19 +1311,14 @@ class PolicyQualifierId(univ.ObjectIdentifier): pass -policyQualifierInfoMap = { - -} - - class PolicyQualifierInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('policyQualifierId', PolicyQualifierId()), - namedtype.NamedType( - 'qualifier', univ.Any(), - openType=opentype.OpenType('policyQualifierId', policyQualifierInfoMap) - ) - ) + pass + + +PolicyQualifierInfo.componentType = namedtype.NamedTypes( + namedtype.NamedType('policyQualifierId', PolicyQualifierId()), + namedtype.NamedType('qualifier', univ.Any()) +) class CertPolicyId(univ.ObjectIdentifier): @@ -1365,7 +1340,7 @@ class CertificatePolicies(univ.SequenceOf): CertificatePolicies.componentType = PolicyInformation() -CertificatePolicies.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +CertificatePolicies.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class SubjectAltName(GeneralNames): @@ -1403,7 +1378,7 @@ PolicyMappings.componentType = univ.Sequence( ) ) -PolicyMappings.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +PolicyMappings.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class InhibitAnyPolicy(SkipCerts): @@ -1467,7 +1442,7 @@ class AuthorityInfoAccessSyntax(univ.SequenceOf): AuthorityInfoAccessSyntax.componentType = AccessDescription() -AuthorityInfoAccessSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +AuthorityInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_holdinstruction_none = _buildOid(holdInstruction, 1) @@ -1495,7 +1470,7 @@ class ExtKeyUsageSyntax(univ.SequenceOf): ExtKeyUsageSyntax.componentType = KeyPurposeId() -ExtKeyUsageSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +ExtKeyUsageSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class HoldInstructionCode(univ.ObjectIdentifier): @@ -1514,7 +1489,7 @@ class SubjectInfoAccessSyntax(univ.SequenceOf): SubjectInfoAccessSyntax.componentType = AccessDescription() -SubjectInfoAccessSyntax.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +SubjectInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class InvalidityDate(useful.GeneralizedTime): @@ -1572,40 +1547,9 @@ id_ce_subjectKeyIdentifier = _buildOid(id_ce, 14) id_ce_inhibitAnyPolicy = _buildOid(id_ce, 54) -# map of ORAddress ExtensionAttribute type to ExtensionAttribute value - -_oraddressExtensionAttributeMapUpdate = { - common_name: CommonName(), - teletex_common_name: 
TeletexCommonName(), - teletex_organization_name: TeletexOrganizationName(), - teletex_personal_name: TeletexPersonalName(), - teletex_organizational_unit_names: TeletexOrganizationalUnitNames(), - pds_name: PDSName(), - physical_delivery_country_name: PhysicalDeliveryCountryName(), - postal_code: PostalCode(), - physical_delivery_office_name: PhysicalDeliveryOfficeName(), - physical_delivery_office_number: PhysicalDeliveryOfficeNumber(), - extension_OR_address_components: ExtensionORAddressComponents(), - physical_delivery_personal_name: PhysicalDeliveryPersonalName(), - physical_delivery_organization_name: PhysicalDeliveryOrganizationName(), - extension_physical_delivery_address_components: ExtensionPhysicalDeliveryAddressComponents(), - unformatted_postal_address: UnformattedPostalAddress(), - street_address: StreetAddress(), - post_office_box_address: PostOfficeBoxAddress(), - poste_restante_address: PosteRestanteAddress(), - unique_postal_name: UniquePostalName(), - local_postal_attributes: LocalPostalAttributes(), - extended_network_address: ExtendedNetworkAddress(), - terminal_type: TerminalType(), - teletex_domain_defined_attributes: TeletexDomainDefinedAttributes(), -} - -oraddressExtensionAttributeMap.update(_oraddressExtensionAttributeMapUpdate) - - # map of AttributeType -> AttributeValue -_certificateAttributesMapUpdate = { +certificateAttributesMap = { id_at_name: X520name(), id_at_surname: X520name(), id_at_givenName: X520name(), @@ -1625,17 +1569,14 @@ _certificateAttributesMapUpdate = { id_emailAddress: EmailAddress(), } -certificateAttributesMap.update(_certificateAttributesMapUpdate) - - # map of Certificate Extension OIDs to Extensions -_certificateExtensionsMap = { +certificateExtensionsMap = { id_ce_authorityKeyIdentifier: AuthorityKeyIdentifier(), id_ce_subjectKeyIdentifier: SubjectKeyIdentifier(), id_ce_keyUsage: KeyUsage(), id_ce_privateKeyUsagePeriod: PrivateKeyUsagePeriod(), - id_ce_certificatePolicies: CertificatePolicies(), + id_ce_certificatePolicies: PolicyInformation(), # could be a sequence of concat'ed objects? id_ce_policyMappings: PolicyMappings(), id_ce_subjectAltName: SubjectAltName(), id_ce_issuerAltName: IssuerAltName(), @@ -1654,5 +1595,3 @@ _certificateExtensionsMap = { id_ce_invalidityDate: useful.GeneralizedTime(), id_ce_certificateIssuer: GeneralNames(), } - -certificateExtensionsMap.update(_certificateExtensionsMap) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5652.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py similarity index 90% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc5652.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py index 2e48962dd330..5fd5b79a9342 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5652.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py @@ -3,10 +3,8 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. -# Modified by Russ Housley to add support for opentypes. 
-# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Cryptographic Message Syntax (CMS) # @@ -16,7 +14,6 @@ from pyasn1.type import constraint from pyasn1.type import namedtype from pyasn1.type import namedval -from pyasn1.type import opentype from pyasn1.type import tag from pyasn1.type import univ from pyasn1.type import useful @@ -38,19 +35,6 @@ def _buildOid(*components): return univ.ObjectIdentifier(output) -cmsContentTypesMap = { } - -cmsAttributesMap = { } - -otherKeyAttributesMap = { } - -otherCertFormatMap = { } - -otherRevInfoFormatMap = { } - -otherRecipientInfoMap = { } - - class AttCertVersionV1(univ.Integer): pass @@ -105,9 +89,7 @@ class Attribute(univ.Sequence): Attribute.componentType = namedtype.NamedTypes( namedtype.NamedType('attrType', univ.ObjectIdentifier()), - namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue()), - openType=opentype.OpenType('attrType', cmsAttributesMap) - ) + namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue())) ) @@ -116,7 +98,7 @@ class SignedAttributes(univ.SetOf): SignedAttributes.componentType = Attribute() -SignedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +SignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class AttributeCertificateV2(rfc3281.AttributeCertificate): @@ -129,9 +111,7 @@ class OtherKeyAttribute(univ.Sequence): OtherKeyAttribute.componentType = namedtype.NamedTypes( namedtype.NamedType('keyAttrId', univ.ObjectIdentifier()), - namedtype.OptionalNamedType('keyAttr', univ.Any(), - openType=opentype.OpenType('keyAttrId', otherKeyAttributesMap) - ) + namedtype.OptionalNamedType('keyAttr', univ.Any()) ) @@ -140,7 +120,7 @@ class UnauthAttributes(univ.SetOf): UnauthAttributes.componentType = Attribute() -UnauthAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +UnauthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_encryptedData = _buildOid(1, 2, 840, 113549, 1, 7, 6) @@ -230,9 +210,7 @@ class OtherCertificateFormat(univ.Sequence): OtherCertificateFormat.componentType = namedtype.NamedTypes( namedtype.NamedType('otherCertFormat', univ.ObjectIdentifier()), - namedtype.NamedType('otherCert', univ.Any(), - openType=opentype.OpenType('otherCertFormat', otherCertFormatMap) - ) + namedtype.NamedType('otherCert', univ.Any()) ) @@ -296,9 +274,7 @@ class OtherRevocationInfoFormat(univ.Sequence): OtherRevocationInfoFormat.componentType = namedtype.NamedTypes( namedtype.NamedType('otherRevInfoFormat', univ.ObjectIdentifier()), - namedtype.NamedType('otherRevInfo', univ.Any(), - openType=opentype.OpenType('otherRevInfoFormat', otherRevInfoFormatMap) - ) + namedtype.NamedType('otherRevInfo', univ.Any()) ) @@ -361,7 +337,7 @@ class UnprotectedAttributes(univ.SetOf): UnprotectedAttributes.componentType = Attribute() -UnprotectedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +UnprotectedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): @@ -479,9 +455,7 @@ class OtherRecipientInfo(univ.Sequence): OtherRecipientInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('oriType', univ.ObjectIdentifier()), - namedtype.NamedType('oriValue', univ.Any(), - openType=opentype.OpenType('oriType', otherRecipientInfoMap) - ) + namedtype.NamedType('oriValue', univ.Any()) ) @@ -507,7 +481,7 @@ class 
RecipientInfos(univ.SetOf): RecipientInfos.componentType = RecipientInfo() -RecipientInfos.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +RecipientInfos.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class EnvelopedData(univ.Sequence): @@ -559,7 +533,7 @@ class UnsignedAttributes(univ.SetOf): UnsignedAttributes.componentType = Attribute() -UnsignedAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +UnsignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class SignerIdentifier(univ.Choice): @@ -607,9 +581,7 @@ class ContentInfo(univ.Sequence): ContentInfo.componentType = namedtype.NamedTypes( namedtype.NamedType('contentType', ContentType()), - namedtype.NamedType('content', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)), - openType=opentype.OpenType('contentType', cmsContentTypesMap) - ) + namedtype.NamedType('content', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) ) @@ -637,7 +609,7 @@ class AuthAttributes(univ.SetOf): AuthAttributes.componentType = Attribute() -AuthAttributes.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +AuthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class Time(univ.Choice): @@ -732,30 +704,3 @@ class SigningTime(Time): id_ct_authData = _buildOid(1, 2, 840, 113549, 1, 9, 16, 1, 2) - - -# CMS Content Type Map - -_cmsContentTypesMapUpdate = { - id_ct_contentInfo: ContentInfo(), - id_data: univ.OctetString(), - id_signedData: SignedData(), - id_envelopedData: EnvelopedData(), - id_digestedData: DigestedData(), - id_encryptedData: EncryptedData(), - id_ct_authData: AuthenticatedData(), -} - -cmsContentTypesMap.update(_cmsContentTypesMapUpdate) - - -# CMS Attribute Map - -_cmsAttributesMapUpdate = { - id_contentType: ContentType(), - id_messageDigest: MessageDigest(), - id_signingTime: SigningTime(), - id_countersignature: Countersignature(), -} - -cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6402.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py similarity index 84% rename from third_party/python/pyasn1_modules/pyasn1_modules/rfc6402.py rename to third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py index b5f0d48fa4f0..c35f855f0038 100644 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6402.py +++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py @@ -3,25 +3,15 @@ # This file is part of pyasn1-modules software. # # Created by Stanisław Pitucha with asn1ate tool. -# Modified by Russ Housley to add a maps for CMC Control Attributes -# and CMC Content Types for use with opentypes. 
-# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # # Certificate Management over CMS (CMC) Updates # # ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6402.txt +# http://www.ietf.org/rfc/rfc6402.txt # -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful +from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful from pyasn1_modules import rfc4211 from pyasn1_modules import rfc5280 @@ -41,10 +31,6 @@ def _buildOid(*components): return univ.ObjectIdentifier(output) -# Since CMS Attributes and CMC Controls both use 'attrType', one map is used -cmcControlAttributesMap = rfc5652.cmsAttributesMap - - class ChangeSubjectName(univ.Sequence): pass @@ -98,7 +84,7 @@ class BodyPartPath(univ.SequenceOf): BodyPartPath.componentType = BodyPartID() -BodyPartPath.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +BodyPartPath.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) class BodyPartReference(univ.Choice): @@ -392,9 +378,7 @@ class TaggedAttribute(univ.Sequence): TaggedAttribute.componentType = namedtype.NamedTypes( namedtype.NamedType('bodyPartID', BodyPartID()), namedtype.NamedType('attrType', univ.ObjectIdentifier()), - namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue()), - openType=opentype.OpenType('attrType', cmcControlAttributesMap) - ) + namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue())) ) @@ -426,7 +410,7 @@ class BodyPartList(univ.SequenceOf): BodyPartList.componentType = BodyPartID() -BodyPartList.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +BodyPartList.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_cmc_responseBody = _buildOid(id_cmc, 37) @@ -491,7 +475,7 @@ class ExtensionReq(univ.SequenceOf): ExtensionReq.componentType = rfc5280.Extension() -ExtensionReq.sizeSpec = constraint.ValueSizeConstraint(1, MAX) +ExtensionReq.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) id_kp_cmcArchive = _buildOid(rfc5280.id_kp, 28) @@ -575,54 +559,3 @@ class NoSignatureValue(univ.OctetString): id_ad_cmc = _buildOid(rfc5280.id_ad, 12) id_alg_noSignature = _buildOid(id_pkix, 6, 2) - - -# Map of CMC Control OIDs to CMC Control Attributes - -_cmcControlAttributesMapUpdate = { - id_cmc_statusInfo: CMCStatusInfo(), - id_cmc_statusInfoV2: CMCStatusInfoV2(), - id_cmc_identification: char.UTF8String(), - id_cmc_identityProof: univ.OctetString(), - id_cmc_identityProofV2: IdentifyProofV2(), - id_cmc_dataReturn: univ.OctetString(), - id_cmc_transactionId: univ.Integer(), - id_cmc_senderNonce: univ.OctetString(), - id_cmc_recipientNonce: univ.OctetString(), - id_cmc_addExtensions: AddExtensions(), - id_cmc_encryptedPOP: EncryptedPOP(), - id_cmc_decryptedPOP: DecryptedPOP(), - id_cmc_lraPOPWitness: LraPopWitness(), - id_cmc_getCert: GetCert(), - id_cmc_getCRL: GetCRL(), - id_cmc_revokeRequest: RevokeRequest(), - id_cmc_regInfo: univ.OctetString(), - id_cmc_responseInfo: univ.OctetString(), - id_cmc_queryPending: univ.OctetString(), - id_cmc_popLinkRandom: univ.OctetString(), - id_cmc_popLinkWitness: univ.OctetString(), - id_cmc_popLinkWitnessV2: PopLinkWitnessV2(), - id_cmc_confirmCertAcceptance: CMCCertId(), - id_cmc_trustedAnchors: PublishTrustAnchors(), - id_cmc_authData: 
AuthPublish(), - id_cmc_batchRequests: BodyPartList(), - id_cmc_batchResponses: BodyPartList(), - id_cmc_publishCert: CMCPublicationInfo(), - id_cmc_modCertTemplate: ModCertTemplate(), - id_cmc_controlProcessed: ControlsProcessed(), - id_ExtensionReq: ExtensionReq(), -} - -cmcControlAttributesMap.update(_cmcControlAttributesMapUpdate) - - -# Map of CMC Content Type OIDs to CMC Content Types are added to -# the ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_cct_PKIData: PKIData(), - id_cct_PKIResponse: PKIResponse(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) - diff --git a/third_party/python/pyasn1-modules/requirements.txt b/third_party/python/pyasn1-modules/requirements.txt new file mode 100644 index 000000000000..01d237c82f19 --- /dev/null +++ b/third_party/python/pyasn1-modules/requirements.txt @@ -0,0 +1 @@ +pyasn1>=0.3.4,<0.4.0 diff --git a/third_party/python/pyasn1-modules/setup.cfg b/third_party/python/pyasn1-modules/setup.cfg new file mode 100644 index 000000000000..6f08d0e3e7d4 --- /dev/null +++ b/third_party/python/pyasn1-modules/setup.cfg @@ -0,0 +1,8 @@ +[bdist_wheel] +universal = 1 + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff --git a/third_party/python/pyasn1-modules/setup.py b/third_party/python/pyasn1-modules/setup.py new file mode 100644 index 000000000000..4d1a6d3d3480 --- /dev/null +++ b/third_party/python/pyasn1-modules/setup.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys + +doclines = """A collection of ASN.1-based protocols modules. + + A collection of ASN.1 modules expressed in form of pyasn1 classes. + Includes protocols PDUs definition (SNMP, LDAP etc.) and various + data structures (X.509, PKCS etc.). +""" + +doclines = [x.strip() for x in doclines.split('\n') if x] + + +classifiers = """\ +Development Status :: 5 - Production/Stable +Environment :: Console +Intended Audience :: Developers +Intended Audience :: Education +Intended Audience :: Information Technology +Intended Audience :: System Administrators +Intended Audience :: Telecommunications Industry +License :: OSI Approved :: BSD License +Natural Language :: English +Operating System :: OS Independent +Programming Language :: Python :: 2 +Programming Language :: Python :: 2.4 +Programming Language :: Python :: 2.5 +Programming Language :: Python :: 2.6 +Programming Language :: Python :: 2.7 +Programming Language :: Python :: 3 +Programming Language :: Python :: 3.2 +Programming Language :: Python :: 3.3 +Programming Language :: Python :: 3.4 +Programming Language :: Python :: 3.5 +Programming Language :: Python :: 3.6 +Topic :: Communications +Topic :: System :: Monitoring +Topic :: System :: Networking :: Monitoring +Topic :: Software Development :: Libraries :: Python Modules +""" + + +def howto_install_setuptools(): + print(""" + Error: You need setuptools Python package! + + It's very easy to install it, just type (as root on Linux): + + wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py + python ez_setup.py + + Then you could make eggs from this package. 
+""") + + +if sys.version_info[:2] < (2, 4): + print("ERROR: this package requires Python 2.4 or later!") + sys.exit(1) + +try: + from setuptools import setup, Command + + params = { + 'zip_safe': True, + 'install_requires': ['pyasn1>=0.3.4,<0.4.0'] + } + +except ImportError: + for arg in sys.argv: + if 'egg' in arg: + howto_install_setuptools() + sys.exit(1) + + from distutils.core import setup, Command + + if sys.version_info[:2] > (2, 4): + params = { + 'requires': ['pyasn1(>=0.3.4,<0.4.0)'] + } + else: + params = { + 'requires': ['pyasn1'] + } + +params.update( + {'name': 'pyasn1-modules', + 'version': open('pyasn1_modules/__init__.py').read().split('\'')[1], + 'description': doclines[0], + 'long_description': ' '.join(doclines[1:]), + 'maintainer': 'Ilya Etingof ', + 'author': 'Ilya Etingof', + 'author_email': 'etingof@gmail.com', + 'url': 'https://github.com/etingof/pyasn1-modules', + 'platforms': ['any'], + 'classifiers': [x for x in classifiers.split('\n') if x], + 'license': 'BSD', + 'packages': ['pyasn1_modules']} +) + + +# handle unittest discovery feature +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class PyTest(Command): + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + suite = unittest.TestLoader().loadTestsFromNames( + ['tests.__main__.suite'] + ) + + unittest.TextTestRunner(verbosity=2).run(suite) + +params['cmdclass'] = { + 'test': PyTest, + 'tests': PyTest +} + +setup(**params) diff --git a/third_party/python/pyasn1-modules/tests/__init__.py b/third_party/python/pyasn1-modules/tests/__init__.py new file mode 100644 index 000000000000..8c3066b2e68f --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/third_party/python/pyasn1-modules/tests/__main__.py b/third_party/python/pyasn1-modules/tests/__main__.py new file mode 100644 index 000000000000..c6377a6273cb --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/__main__.py @@ -0,0 +1,28 @@ +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +try: + import unittest2 as unittest + +except ImportError: + import unittest + +suite = unittest.TestLoader().loadTestsFromNames( + ['tests.test_rfc2314.suite', + 'tests.test_rfc2315.suite', + 'tests.test_rfc2437.suite', + 'tests.test_rfc2459.suite', + 'tests.test_rfc2511.suite', + 'tests.test_rfc2560.suite', + 'tests.test_rfc4210.suite', + 'tests.test_rfc5208.suite', + 'tests.test_rfc5280.suite', + 'tests.test_rfc5652.suite',] +) + + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2314.py b/third_party/python/pyasn1-modules/tests/test_rfc2314.py new file mode 100644 index 000000000000..6dd5c47f3346 --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc2314.py @@ -0,0 +1,57 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc2314, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class CertificationRequestTestCase(unittest.TestCase): + pem_text = """\ +MIIDATCCAekCAQAwgZkxCzAJBgNVBAYTAlJVMRYwFAYDVQQIEw1Nb3Njb3cgUmVn +aW9uMQ8wDQYDVQQHEwZNb3Njb3cxGjAYBgNVBAoTEVNOTVAgTGFib3JhdG9yaWVz +MQwwCgYDVQQLFANSJkQxFTATBgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3 +DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQC9n2NfGS98JDBmAXQn+vNUyPB3QPYC1cwpX8UMYh9MdAmBZJCnvXrQ +Pp14gNAv6AQKxefmGES1b+Yd+1we9HB8AKm1/8xvRDUjAvy4iO0sqFCPvIfSujUy +pBcfnR7QE2itvyrMxCDSEVnMhKdCNb23L2TptUmpvLcb8wfAMLFsSu2yaOtJysep +oH/mvGqlRv2ti2+E2YA0M7Pf83wyV1XmuEsc9tQ225rprDk2uyshUglkDD2235rf +0QyONq3Aw3BMrO9ss1qj7vdDhVHVsxHnTVbEgrxEWkq2GkVKh9QReMZ2AKxe40j4 +og+OjKXguOCggCZHJyXKxccwqCaeCztbAgMBAAGgIjAgBgkqhkiG9w0BCQIxExMR +U05NUCBMYWJvcmF0b3JpZXMwDQYJKoZIhvcNAQEFBQADggEBAAihbwmN9M2bsNNm +9KfxqiGMqqcGCtzIlpDz/2NVwY93cEZsbz3Qscc0QpknRmyTSoDwIG+1nUH0vzkT +Nv8sBmp9I1GdhGg52DIaWwL4t9O5WUHgfHSJpPxZ/zMP2qIsdPJ+8o19BbXRlufc +73c03H1piGeb9VcePIaulSHI622xukI6f4Sis49vkDaoi+jadbEEb6TYkJQ3AMRD +WdApGGm0BePdLqboW1Yv70WRRFFD8sxeT7Yw4qrJojdnq0xMHPGfKpf6dJsqWkHk +b5DRbjil1Zt9pJuF680S9wtBzSi0hsMHXR9TzS7HpMjykL2nmCVY6A78MZapsCzn +GGbx7DI= +""" + + def setUp(self): + self.asn1Spec = rfc2314.CertificationRequest() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2315.py b/third_party/python/pyasn1-modules/tests/test_rfc2315.py new file mode 100644 index 000000000000..b451ed10961a --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc2315.py @@ -0,0 +1,179 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc2315, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class Pkcs7TestCase(unittest.TestCase): + pem_text_unordered = """\ +MIIKdQYJKoZIhvcNAQcCoIIKZjCCCmICAQExADALBgkqhkiG9w0BBwGgggpIMIIC +XjCCAcegAwIBAgIBADANBgkqhkiG9w0BAQQFADB1MQswCQYDVQQGEwJSVTEPMA0G +A1UEBxMGTW9zY293MRcwFQYDVQQKEw5Tb3ZhbSBUZWxlcG9ydDEMMAoGA1UECxMD +TklTMQ8wDQYDVQQDEwZBQlMgQ0ExHTAbBgkqhkiG9w0BCQEWDmNlcnRAb25saW5l +LnJ1MB4XDTk5MDgxNTE5MDI1OFoXDTAwMDExMjE5MDI1OFowdTELMAkGA1UEBhMC +UlUxDzANBgNVBAcTBk1vc2NvdzEXMBUGA1UEChMOU292YW0gVGVsZXBvcnQxDDAK +BgNVBAsTA05JUzEPMA0GA1UEAxMGQUJTIENBMR0wGwYJKoZIhvcNAQkBFg5jZXJ0 +QG9ubGluZS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw0g1P0yQAZIi +ml2XOCOxnCcuhHmAgj4Ei9M2ebrrGwUMONPzr1a8W7JcpnR3FeOjxEIxrzkHr6UA +oj4l/oC7Rv28uIig+Okf+82ekhH6VgAQNr5LAzfN8J6dZLx2OXAmmLleAqHuisT7 +I40vEFRoRmC5hiMlILE2rIlIKJn6cUkCAwEAATANBgkqhkiG9w0BAQQFAAOBgQBZ +7ELDfGUNb+fbpHl5W3d9JMXsdOgd96+HG+X1SPgeiRAMjkla8WFCSaQPIR4vCy0m +tm5a2bWSji6+vP5FGbjOz5iMlHMrCtu0He7Eim2zpaGI06ZIY75Cn1h2r3+KS0/R +h01TJUbmsfV1tZm6Wk3bayJ+/K8A4mBHv8P6rhYacDCCAowwggH1oAMCAQICAQAw +DQYJKoZIhvcNAQEEBQAwgYsxCzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cx +FzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMT +FUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xk +ZW50ZWxlY29tLnJ1MB4XDTAwMDEwNTE1MDY1MVoXDTEwMDExNTE1MDY1MVowgYsx +CzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBU +ZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFC +UyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MIGfMA0G +CSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPFel/Svli6ogoUEb6eLtEvNSjyalETSMP +MIZXdmWIkWijvEUhDnNJVAE3knAt6dVYqxWq0vc6CbAGFZNqEyioGU48IECLzV0G +toiYejF/c9PuyIKDejeV9/YZnNFaZAUOXhOjREdZURLISKhX4tAbQyvK0Qka9AAR +MEy9DoqV8QIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAHQzgqFkoSMQr077UCr5C0l1 +rxLA17TrocCmUC1/PLmN0LmUHD0d7TjjTQKJaJBHxcKIg6+FOY6LSSY4nAN79eXi +nBz+jEUG7+NTU/jcEArI35yP7fi4Mwb96EYDmUkUGtcLNq3JBe/d1Zhmy9HnNBL1 +Dn9thM2Q8RPYAJIU3JnGMIICqTCCAhICAQAwDQYJKoZIhvcNAQEEBQAwgZwxCzAJ +BgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1vc2NvdzEXMBUG +A1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwGA1UEAxMVR29s +ZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0QGdvbGRlbnRl +bGVjb20ucnUwHhcNMTAwMTE1MTU0MDI2WhcNMjAwMjIyMTU0MDI2WjCBnDELMAkG +A1UEBhMCUlUxDzANBgNVBAgTBk1vc2NvdzEPMA0GA1UEBxMGTW9zY293MRcwFQYD +VQQKEw5Hb2xkZW4gVGVsZWNvbTEMMAoGA1UECxMDUk9MMR4wHAYDVQQDExVHb2xk +ZW4gVGVsZWNvbSBBQlMgQ0ExJDAiBgkqhkiG9w0BCQEWFWNlcnRAZ29sZGVudGVs +ZWNvbS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAzxXpf0r5YuqIKFBG ++ni7RLzUo8mpRE0jDzCGV3ZliJFoo7xFIQ5zSVQBN5JwLenVWKsVqtL3OgmwBhWT +ahMoqBlOPCBAi81dBraImHoxf3PT7siCg3o3lff2GZzRWmQFDl4To0RHWVESyEio +V+LQG0MrytEJGvQAETBMvQ6KlfECAwEAATANBgkqhkiG9w0BAQQFAAOBgQCMrS4T +LIzxcpu8nwOq/xMcxW4Ctz/wjIoePWkmSLe+Tkb4zo7aTsvzn+ETaWb7qztUpyl0 +QvlXn4vC2iCJloPpofPqSzF1UV3g5Zb93ReZu7E6kEyW0ag8R5XZKv0xuR3b3Le+ +ZqolT8wQELd5Mmw5JPofZ+O2cGNvet8tYwOKFjCCAqUwggIOoAMCAQICAgboMA0G +CSqGSIb3DQEBBAUAMIGcMQswCQYDVQQGEwJSVTEPMA0GA1UECBMGTW9zY293MQ8w +DQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQL +EwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3 +DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MB4XDTExMDEyODEyMTcwOVoXDTEy +MDIwMTAwMDAwMFowdjELMAkGA1UEBhMCUlUxDDAKBgNVBAgTA04vQTEXMBUGA1UE 
+ChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA0lTUDEWMBQGA1UEAxMNY3JheS5n +bGFzLm5ldDEaMBgGCSqGSIb3DQEJARYLZWxpZUByb2wucnUwgZ8wDQYJKoZIhvcN +AQEBBQADgY0AMIGJAoGBAPJAm8KG3ZCoJSvoGmLMPlGaMIpadu/EGSEYu+M/ybLp +Cs8XmwB3876JVKKCbtGI6eqxOqvjedYXb+nKcyhz4Ztmm8RgAD7Z1WUItIpatejT +79EYOUWrDN713SLZsImMyP4B4EySl4LZfHFRU2iOwLB6WozGCYuULLqYS9MDPrnT +AgMBAAGjGzAZMBcGCWCGSAGG+EIBDQQKFghDPS07Uz0tOzANBgkqhkiG9w0BAQQF +AAOBgQDEttS70qYCA+MGBA3hOR88XiBcTmuBarJDwn/rj31vRjYZUgp9bbFwscRI +Ic4lDnlyvunwNitl+341bDg7u6Ebu9hCMbciyu4EtrsDh77DlLzbmNcXbnhlvbFL +K9GiPz3dNyvQMfmaA0twd62zJDOVJ1SmO04lLmu/pAx8GhBZkqEAMQA= +""" + + pem_text_reordered = """\ +MIIKcwYJKoZIhvcNAQcCoIIKZDCCCmACAQExADALBgkqhkiG9w0BBwGgggpIMIIC +XjCCAcegAwIBAgIBADANBgkqhkiG9w0BAQQFADB1MQswCQYDVQQGEwJSVTEPMA0G +A1UEBxMGTW9zY293MRcwFQYDVQQKEw5Tb3ZhbSBUZWxlcG9ydDEMMAoGA1UECxMD +TklTMQ8wDQYDVQQDEwZBQlMgQ0ExHTAbBgkqhkiG9w0BCQEWDmNlcnRAb25saW5l +LnJ1MB4XDTk5MDgxNTE5MDI1OFoXDTAwMDExMjE5MDI1OFowdTELMAkGA1UEBhMC +UlUxDzANBgNVBAcTBk1vc2NvdzEXMBUGA1UEChMOU292YW0gVGVsZXBvcnQxDDAK +BgNVBAsTA05JUzEPMA0GA1UEAxMGQUJTIENBMR0wGwYJKoZIhvcNAQkBFg5jZXJ0 +QG9ubGluZS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw0g1P0yQAZIi +ml2XOCOxnCcuhHmAgj4Ei9M2ebrrGwUMONPzr1a8W7JcpnR3FeOjxEIxrzkHr6UA +oj4l/oC7Rv28uIig+Okf+82ekhH6VgAQNr5LAzfN8J6dZLx2OXAmmLleAqHuisT7 +I40vEFRoRmC5hiMlILE2rIlIKJn6cUkCAwEAATANBgkqhkiG9w0BAQQFAAOBgQBZ +7ELDfGUNb+fbpHl5W3d9JMXsdOgd96+HG+X1SPgeiRAMjkla8WFCSaQPIR4vCy0m +tm5a2bWSji6+vP5FGbjOz5iMlHMrCtu0He7Eim2zpaGI06ZIY75Cn1h2r3+KS0/R +h01TJUbmsfV1tZm6Wk3bayJ+/K8A4mBHv8P6rhYacDCCAowwggH1oAMCAQICAQAw +DQYJKoZIhvcNAQEEBQAwgYsxCzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cx +FzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMT +FUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xk +ZW50ZWxlY29tLnJ1MB4XDTAwMDEwNTE1MDY1MVoXDTEwMDExNTE1MDY1MVowgYsx +CzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBU +ZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFC +UyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MIGfMA0G +CSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPFel/Svli6ogoUEb6eLtEvNSjyalETSMP +MIZXdmWIkWijvEUhDnNJVAE3knAt6dVYqxWq0vc6CbAGFZNqEyioGU48IECLzV0G +toiYejF/c9PuyIKDejeV9/YZnNFaZAUOXhOjREdZURLISKhX4tAbQyvK0Qka9AAR +MEy9DoqV8QIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAHQzgqFkoSMQr077UCr5C0l1 +rxLA17TrocCmUC1/PLmN0LmUHD0d7TjjTQKJaJBHxcKIg6+FOY6LSSY4nAN79eXi +nBz+jEUG7+NTU/jcEArI35yP7fi4Mwb96EYDmUkUGtcLNq3JBe/d1Zhmy9HnNBL1 +Dn9thM2Q8RPYAJIU3JnGMIICpTCCAg6gAwIBAgICBugwDQYJKoZIhvcNAQEEBQAw +gZwxCzAJBgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1vc2Nv +dzEXMBUGA1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwGA1UE +AxMVR29sZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0QGdv +bGRlbnRlbGVjb20ucnUwHhcNMTEwMTI4MTIxNzA5WhcNMTIwMjAxMDAwMDAwWjB2 +MQswCQYDVQQGEwJSVTEMMAoGA1UECBMDTi9BMRcwFQYDVQQKEw5Hb2xkZW4gVGVs +ZWNvbTEMMAoGA1UECxMDSVNQMRYwFAYDVQQDEw1jcmF5LmdsYXMubmV0MRowGAYJ +KoZIhvcNAQkBFgtlbGllQHJvbC5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC +gYEA8kCbwobdkKglK+gaYsw+UZowilp278QZIRi74z/JsukKzxebAHfzvolUooJu +0Yjp6rE6q+N51hdv6cpzKHPhm2abxGAAPtnVZQi0ilq16NPv0Rg5RasM3vXdItmw +iYzI/gHgTJKXgtl8cVFTaI7AsHpajMYJi5QsuphL0wM+udMCAwEAAaMbMBkwFwYJ +YIZIAYb4QgENBAoWCEM9LTtTPS07MA0GCSqGSIb3DQEBBAUAA4GBAMS21LvSpgID +4wYEDeE5HzxeIFxOa4FqskPCf+uPfW9GNhlSCn1tsXCxxEghziUOeXK+6fA2K2X7 +fjVsODu7oRu72EIxtyLK7gS2uwOHvsOUvNuY1xdueGW9sUsr0aI/Pd03K9Ax+ZoD +S3B3rbMkM5UnVKY7TiUua7+kDHwaEFmSMIICqTCCAhICAQAwDQYJKoZIhvcNAQEE +BQAwgZwxCzAJBgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1v +c2NvdzEXMBUGA1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwG 
+A1UEAxMVR29sZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0 +QGdvbGRlbnRlbGVjb20ucnUwHhcNMTAwMTE1MTU0MDI2WhcNMjAwMjIyMTU0MDI2 +WjCBnDELMAkGA1UEBhMCUlUxDzANBgNVBAgTBk1vc2NvdzEPMA0GA1UEBxMGTW9z +Y293MRcwFQYDVQQKEw5Hb2xkZW4gVGVsZWNvbTEMMAoGA1UECxMDUk9MMR4wHAYD +VQQDExVHb2xkZW4gVGVsZWNvbSBBQlMgQ0ExJDAiBgkqhkiG9w0BCQEWFWNlcnRA +Z29sZGVudGVsZWNvbS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAzxXp +f0r5YuqIKFBG+ni7RLzUo8mpRE0jDzCGV3ZliJFoo7xFIQ5zSVQBN5JwLenVWKsV +qtL3OgmwBhWTahMoqBlOPCBAi81dBraImHoxf3PT7siCg3o3lff2GZzRWmQFDl4T +o0RHWVESyEioV+LQG0MrytEJGvQAETBMvQ6KlfECAwEAATANBgkqhkiG9w0BAQQF +AAOBgQCMrS4TLIzxcpu8nwOq/xMcxW4Ctz/wjIoePWkmSLe+Tkb4zo7aTsvzn+ET +aWb7qztUpyl0QvlXn4vC2iCJloPpofPqSzF1UV3g5Zb93ReZu7E6kEyW0ag8R5XZ +Kv0xuR3b3Le+ZqolT8wQELd5Mmw5JPofZ+O2cGNvet8tYwOKFjEA +""" + + def setUp(self): + self.asn1Spec = rfc2315.ContentInfo() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text_unordered) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + contentType = asn1Object['contentType'] + substrate = asn1Object['content'] + + contentInfoMap = { + (1, 2, 840, 113549, 1, 7, 1): rfc2315.Data(), + (1, 2, 840, 113549, 1, 7, 2): rfc2315.SignedData(), + (1, 2, 840, 113549, 1, 7, 3): rfc2315.EnvelopedData(), + (1, 2, 840, 113549, 1, 7, 4): rfc2315.SignedAndEnvelopedData(), + (1, 2, 840, 113549, 1, 7, 5): rfc2315.DigestedData(), + (1, 2, 840, 113549, 1, 7, 6): rfc2315.EncryptedData() + } + + innerAsn1Object, rest = der_decoder.decode( + substrate, asn1Spec=contentInfoMap[contentType] + ) + + asn1Object['content'] = der_encoder.encode(innerAsn1Object) + + substrate = pem.readBase64fromText(self.pem_text_reordered) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2437.py b/third_party/python/pyasn1-modules/tests/test_rfc2437.py new file mode 100644 index 000000000000..8d3539aa1370 --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc2437.py @@ -0,0 +1,47 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc2437, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class RSAPrivateKeyTestCase(unittest.TestCase): + pem_text = """\ +MIIBPAIBAAJBAMfAjvBNDDYBCl1w3yNcagZkPhqd0q5KqeOTgKSLuJWfe5+VSeR5 +Y1PcF3DyH8dvS3t8PIQjxJLoKS7HVRlsfhECAwEAAQJBAIr93/gxhIenXbD7MykF +yvi7k8MtgkWoymICZwcX+c6RudFyuPPfQJ/sf6RmFZlRA9X9CQm5NwVG7+x1Yi6t +KoECIQDmJUCWkPCiQYow6YxetpXFa0K6hTzOPmax7MNHVWNgmQIhAN4xOZ4JFT34 +xVhK+8EudBCYRomJUHmOJfoQAxiIXVw5AiEAyB7ecc5on/5zhqKef4Eu7LKfHIdc +304diFuDVpTmTAkCIC2ZmKOQZaWkSowGR4isCfHl7oQHhFaOD8k0RA5i3hYxAiEA +n8lDw3JT6NjvMnD6aM8KBsLyhazWSVVkaUSqmJzgCF0= +""" + + def setUp(self): + self.asn1Spec = rfc2437.RSAPrivateKey() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2459.py b/third_party/python/pyasn1-modules/tests/test_rfc2459.py new file mode 100644 index 000000000000..1fa9d07ed738 --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc2459.py @@ -0,0 +1,111 @@ +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc2459, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class CertificateTestCase(unittest.TestCase): + pem_text = """\ +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +""" + + def setUp(self): + self.asn1Spec = rfc2459.Certificate() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +class CertificateListTestCase(unittest.TestCase): + pem_text = """\ 
+MIIBVjCBwAIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJBVTETMBEGA1UE +CBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRk +MRUwEwYDVQQDEwxzbm1wbGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25t +cGxhYnMuY29tFw0xMjA0MTExMzQwNTlaFw0xMjA1MTExMzQwNTlaoA4wDDAKBgNV +HRQEAwIBATANBgkqhkiG9w0BAQUFAAOBgQC1D/wwnrcY/uFBHGc6SyoYss2kn+nY +RTwzXmmldbNTCQ03x5vkWGGIaRJdN8QeCzbEi7gpgxgpxAx6Y5WkxkMQ1UPjNM5n +DGVDOtR0dskFrrbHuNpWqWrDaBN0/ryZiWKjr9JRbrpkHgVY29I1gLooQ6IHuKHY +vjnIhxTFoCb5vA== +""" + + def setUp(self): + self.asn1Spec = rfc2459.CertificateList() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +class DSAPrivateKeyTestCase(unittest.TestCase): + pem_text = """\ +MIIBugIBAAKBgQCN91+Cma8UPw09gjwP9WOJCdpv3mv3/qFqzgiODGZx0Q002iTl +1dq36m5TsWYFEcMCEyC3tFuoQ0mGq5zUUOmJvHCIPufs0g8Av0fhY77uFqneHHUi +VQMCPCHX9vTCWskmDE21LJppU27bR4H2q+ysE30d6u3+84qrItsn4bjpcQIVAPR5 +QrmooOXDn7fHJzshmxImGC4VAoGAXxKyEnlvzq93d4V6KLWX3H5Jk2JP771Ss1bT +6D/mSbLlvjjo7qsj6diul1axu6Wny31oPertzA2FeGEzkqvjSNmSxyYYMDB3kEcx +ahntt37I1FgSlgdZHuhdtl1h1DBKXqCCneOZuNj+kW5ib14u5HDfFIbec2HJbvVs +lJ/k83kCgYB4TD8vgHetXHxqsiZDoy5wOnQ3mmFAfl8ZdQsIfov6kEgArwPYUOVB +JsX84f+MFjIOKXUV8dHZ8VRrGCLAbXcxKqLNWKlKHUnEsvt63pkaTy/RKHyQS+pn +wontdTt9EtbF+CqIWnm2wpn3O+SbdtawzPOL1CcGB0jYABwbeQ81RwIUFKdyRYaa +INow2I3/ks+0MxDabTY= +""" + + def setUp(self): + self.asn1Spec = rfc2459.DSAPrivateKey() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2511.py b/third_party/python/pyasn1-modules/tests/test_rfc2511.py new file mode 100644 index 000000000000..ef4cc000c2bd --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc2511.py @@ -0,0 +1,49 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc2511, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class CertificateReqTestCase(unittest.TestCase): + pem_text = """\ +MIIBozCCAZ8wggEFAgUAwTnj2jCByoABAqURMA8xDTALBgNVBAMTBHVzZXKmgZ8w +DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAJ6ZQ2cYbn/lFsmBOlRltbRbFQUvvE0Q +nbopOu1kC7Bmaaz7QTx8nxeiHi4m7uxCbGGxHNoGCt7EmdG8eZUBNAcHyGlXrJdm +0z3/uNEGiBHq+xB8FnFJCA5EIJ3RWFnlbu9otSITLxWK7c5+/NHmWM+yaeHD/f/h +rp01c/8qXZfZAgMBAAGpEDAOBgNVHQ8BAf8EBAMCBeAwLzASBgkrBgEFBQcFAQEM +BTExMTExMBkGCSsGAQUFBwUBAgwMc2VydmVyX21hZ2ljoYGTMA0GCSqGSIb3DQEB +BQUAA4GBAEI3KNEvTq/n1kNVhNhPkovk1AZxyJrN1u1+7Gkc4PLjWwjLOjcEVWt4 +AajUk/gkIJ6bbeO+fZlMjHfPSDKcD6AV2hN+n72QZwfzcw3icNvBG1el9EU4XfIm +xfu5YVWi81/fw8QQ6X6YGHFQkomLd7jxakVyjxSng9BhO6GpjJNF +""" + + def setUp(self): + self.asn1Spec = rfc2511.CertReqMessages() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2560.py b/third_party/python/pyasn1-modules/tests/test_rfc2560.py new file mode 100644 index 000000000000..142284bc309d --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc2560.py @@ -0,0 +1,81 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc2560, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class OCSPRequestTestCase(unittest.TestCase): + pem_text = """\ +MGowaDBBMD8wPTAJBgUrDgMCGgUABBS3ZrMV9C5Dko03aH13cEZeppg3wgQUkqR1LKSevoFE63n8 +isWVpesQdXMCBDXe9M+iIzAhMB8GCSsGAQUFBzABAgQSBBBjdJOiIW9EKJGELNNf/rdA +""" + + def setUp(self): + self.asn1Spec = rfc2560.OCSPRequest() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +class OCSPResponseTestCase(unittest.TestCase): + pem_text = """\ +MIIEvQoBAKCCBLYwggSyBgkrBgEFBQcwAQEEggSjMIIEnzCCAQ+hgYAwfjELMAkGA1UEBhMCQVUx +EzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEV +MBMGA1UEAxMMc25tcGxhYnMuY29tMSAwHgYJKoZIhvcNAQkBFhFpbmZvQHNubXBsYWJzLmNvbRgP +MjAxMjA0MTExNDA5MjJaMFQwUjA9MAkGBSsOAwIaBQAEFLdmsxX0LkOSjTdofXdwRl6mmDfCBBSS +pHUspJ6+gUTrefyKxZWl6xB1cwIENd70z4IAGA8yMDEyMDQxMTE0MDkyMlqhIzAhMB8GCSsGAQUF +BzABAgQSBBBjdJOiIW9EKJGELNNf/rdAMA0GCSqGSIb3DQEBBQUAA4GBADk7oRiCy4ew1u0N52QL +RFpW+tdb0NfkV2Xyu+HChKiTThZPr9ZXalIgkJ1w3BAnzhbB0JX/zq7Pf8yEz/OrQ4GGH7HyD3Vg +PkMu+J6I3A2An+bUQo99AmCbZ5/tSHtDYQMQt3iNbv1fk0yvDmh7UdKuXUNSyJdHeg27dMNy4k8A +oIIC9TCCAvEwggLtMIICVqADAgECAgEBMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAkFVMRMw +EQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxFTAT +BgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wHhcN +MTIwNDExMTMyNTM1WhcNMTMwNDExMTMyNTM1WjB+MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29t +ZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRUwEwYDVQQDEwxzbm1w +bGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25tcGxhYnMuY29tMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDDDU5HOnNV8I2CojxB8ilIWRHYQuaAjnjrETMOprouDHFXnwWqQo/I3m0b +XYmocrh9kDefb+cgc7+eJKvAvBqrqXRnU38DmQU/zhypCftGGfP8xjuBZ1n23lR3hplN1yYA0J2X +SgBaAg6e8OsKf1vcX8Es09rDo8mQpt4G2zR56wIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG ++EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU8Ys2dpJFLMHl +yY57D4BNmlqnEcYwHwYDVR0jBBgwFoAU8Ys2dpJFLMHlyY57D4BNmlqnEcYwDQYJKoZIhvcNAQEF +BQADgYEAWR0uFJVlQId6hVpUbgXFTpywtNitNXFiYYkRRv77McSJqLCa/c1wnuLmqcFcuRUK0oN6 +8ZJDP2HDDKe8MCZ8+sx+CF54eM8VCgN9uQ9XyE7x9XrXDd3Uw9RJVaWSIezkNKNeBE0lDM2jUjC4 +HAESdf7nebz1wtqAOXE1jWF/y8g= +""" + + def setUp(self): + self.asn1Spec = rfc2560.OCSPResponse() + + def testDerCodec(self): + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc4210.py b/third_party/python/pyasn1-modules/tests/test_rfc4210.py new file mode 100644 index 000000000000..35451c8a456d --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc4210.py @@ -0,0 +1,129 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc4210, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class PKIMessageTestCase(unittest.TestCase): + pem_text = """\ +MIITuTCCARECAQKkWTBXMQswCQYDVQQGEwJUUjEQMA4GA1UEChMHRS1HdXZlbjEUMBIGA1UECxML +VHJ1c3RDZW50ZXIxIDAeBgNVBAMTF1JTQSBTZWN1cml0eSBDTVAgU2VydmVypC0wKzELMAkGA1UE +BhMCVFIxHDAaBgNVBAMME1ZhbGltby1WZXR0b3ItMTdEZWOgERgPMjAxMjA1MDMxMTE2MTdaoQ8w +DQYJKoZIhvcNAQEFBQCiIgQgZWVhMjg5MGU2ZGY5N2IyNzk5NWY2MWE0MzE2MzI1OWGkEgQQQ01Q +VjJUMTIyMzM0NjI3MKUSBBCAAAABgAAAAYAAAAGAAAABphIEEDEzNjY0NDMwMjlSYW5kb22jghIZ +MIISFaGCC84wggvKMIIFwDCCBKigAwIBAgIQfOVE05R616R6Nqgu3drXHzANBgkqhkiG9w0BAQUF +ADBxMQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5T +LjE4MDYGA1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNp +c2kwHhcNMDgxMTI0MTAwMzI0WhcNMTYxMjE0MTExNzI0WjBdMQswCQYDVQQGEwJUUjEoMCYGA1UE +CgwfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjEkMCIGA1UEAwwbZS1HdXZlbiBNb2Jp +bCBUZXN0VVRGLTgtU09OMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzqaymRo5chRK +EKrhjWQky1HOm6b/Jy4tSUuo4vq3O9U3G2osOU/hHb6fyMmznLpc6CaZ3qKYiuDMFRW8g1kNjEjV +sFSvH0Yd4qgwP1+qqzhBSe+nCAnEbRUrz+nXJ4fKhmGaQ+ZSic+MeyoqDsf/zENKqdV7ea9l3Ilu +Rj93bmTxas9aWPWQ/U/fpwkwRXaqaONlM5e4GWdgA7T1aq106NvH1z6LDNXcMYw4lSZkj/UjmM/0 +NhVz+57Ib4a0bogTaBmm8a1E5NtzkcA7pgnZT8576T0UoiOpEo+NAELA1B0mRh1/82HK1/0xn1zt +1ym4XZRtn2r2l/wTeEwU79ALVQIDAQABo4ICZjCCAmIwfAYIKwYBBQUHAQEEcDBuMDIGCCsGAQUF +BzABhiZodHRwOi8vdGVzdG9jc3AyLmUtZ3V2ZW4uY29tL29jc3AueHVkYTA4BggrBgEFBQcwAoYs +aHR0cDovL3d3dy5lLWd1dmVuLmNvbS9kb2N1bWVudHMvVGVzdEtvay5jcnQwDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wggElBgNVHSAEggEcMIIBGDCCARQGCWCGGAMAAQECATCCAQUw +NgYIKwYBBQUHAgEWKmh0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL05FU1VFLnBkZjCB +ygYIKwYBBQUHAgIwgb0egboAQgB1ACAAcwBlAHIAdABpAGYAaQBrAGEAIABpAGwAZQAgAGkAbABn +AGkAbABpACAAcwBlAHIAdABpAGYAaQBrAGEAIAB1AHkAZwB1AGwAYQBtAGEAIABlAHMAYQBzAGwA +YQByATEAbgExACAAbwBrAHUAbQBhAGsAIABpAOcAaQBuACAAYgBlAGwAaQByAHQAaQBsAGUAbgAg +AGQAbwBrAPwAbQBhAG4BMQAgAGEA5wExAG4BMQB6AC4wWAYDVR0fBFEwTzBNoEugSYZHaHR0cDov +L3Rlc3RzaWwuZS1ndXZlbi5jb20vRWxla3Ryb25pa0JpbGdpR3V2ZW5saWdpQVNSb290L0xhdGVz +dENSTC5jcmwwHQYDVR0OBBYEFLMoTImEKeXbqNjbYZkKshQi2vwzMB8GA1UdIwQYMBaAFGCI4dY9 +qCIkag0hwBgz5haCSNl0MA0GCSqGSIb3DQEBBQUAA4IBAQAWOsmvpoFB9sX2aq1/LjPDJ+A5Fpxm +0XkOGM9yD/FsLfWgyv2HqBY1cVM7mjJfJ1ezkS0ODdlU6TyN5ouvAi21V9CIk69I3eUYSDjPpGia +qcCCvJoMF0QD7B70kj2zW7IJ7pF11cbvPLaatdzojsH9fVfKtxtn/ZLrXtKsyUW5vKHOeniU6BBB +Gl/ZZkFNXNN4mrB+B+wDV9OmdMw+Mc8KPq463hJQRat5a9lrXMdNtMAJOkvsUUzOemAsITjXWlyg +BULijBhi8ZmMp0W7p6oKENX3vH2HCPCGQU29WIrK4iUoscjz93fB6oa4FQpxY0k3JRnWvD5FqkRD +FKJdq/q9MIIDzzCCAregAwIBAgIQa34pJYdDFNXx90OkMkKzIjANBgkqhkiG9w0BAQUFADBxMQsw +CQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE4MDYG +A1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNpc2kwHhcN +MDYxMjE1MTUxMzU0WhcNMTYxMjE1MTExMzU0WjBxMQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxl +a3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE4MDYGA1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlr +IFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNpc2kwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQCU/PTxSkcWPJMx4UO8L8ep9/JqRgAZ79EqYWgR4K2bNLgENpc5j0hO+QydgovFODzkEIBP +RIBavMz9Cw2PONpSBmxd4K1A/5hGqoGEz8UCA2tIx4+Z2A9AQ2O3BYi9FWM+0D1brJDO+6yvX4m5 +Rf3mLlso52NIVV705fIkmOExHjdAj/xB0/LICZMfwKn8F19Jae/SQv9cFnptbNRCq8hU5zLRngpR 
+eT1PYrZVV0XLbzbDPwgzLXCzDxG1atdGd5JRTnD58qM1foC3+hGafuyissMQVGnBQFlsx7V6OdlD +bsxUXegCl2li0RpRJXLqyqMdtEplaznKp8NnbddylfrPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +hjAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFGCI4dY9qCIkag0hwBgz5haCSNl0MB0GA1Ud +DgQWBBRgiOHWPagiJGoNIcAYM+YWgkjZdDANBgkqhkiG9w0BAQUFAAOCAQEAKftTVjgltZJxXwDs +MumguOSlljOQjotVVpES1QYwo3a5RQVpKuS4KYDEdWLD4ITtDNOA/iGKYWCNyKsE1BCL66irknZw +iR6p6P+q2Wf7fGYSwUBcSBwWBTA+0EgpvPL3/vRuVVCVgC8XHBr72jKKTg9Nwcj+1FwXGZTDpjX8 +dzPhTXEWceQcDn2FRdNt6BQad9Hdq08lMHiyozsWniYZYuWpud91i8Pl698H9t0KqiJg6rPKc9kd +z9QyC8E/cLIJgYhvfzXMxvmSjeSSFSqTHioqfpU3k8AWXuxqJUxbdQ8QrVaTXRByzEr1Ze0TYpDs +oel1PjC9ouO8bC7cGrbCWzCCAi8wggGYAhBlEjJUo9asY2ISG4oHjcpzMA0GCSqGSIb3DQEBBQUA +MFoxCzAJBgNVBAYTAlRSMRAwDgYDVQQKEwdFLUd1dmVuMRQwEgYDVQQLEwtUcnVzdENlbnRlcjEj +MCEGA1UEAxMaRS1HdXZlblRFU1RDQUhTTSBTeXN0ZW0gQ0EwHhcNMDkxMTMwMjIxMzEzWhcNMTYx +MTMwMTkxMTUxWjBXMQswCQYDVQQGEwJUUjEQMA4GA1UEChMHRS1HdXZlbjEUMBIGA1UECxMLVHJ1 +c3RDZW50ZXIxIDAeBgNVBAMTF1JTQSBTZWN1cml0eSBDTVAgU2VydmVyMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDCaZeJerGULW+1UPSu9T0voPNgzPcihXX6G5Q45nS4RNCe+pOc226EtD51 +wu6Eq2oARpZmCrKPn63EFmHEE04dRDr8MS2LHuZK8xslIx/AvPnV568795EPoAyhGIX9Na9ZHhnI +zSPWmWfBd9bsQiLVF7C9dOvfW125mtywWXELewIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAAiIse/x +aWwRWUM0CIzfnoXfrgyLdKVykK7dTPgoMJgAx229uN6VTPyk+E+lTKq9PhK+e/VJNNg9PjSFjKFd +lfSDOi9ne1xOrb7cNTjw+sGf1mfNWyzizLXa7su7ISFN+GaClmAstH9vXsRxg1oh3pFMJv47I6iw +gUQlwwg8WsY/MIIGPzCCBjsCAQAwAwIBADCCBi+gggYrMIIGJzCCBQ+gAwIBAgIRALGVtVAeoM1x +gjgOX3alZ5MwDQYJKoZIhvcNAQEFBQAwXTELMAkGA1UEBhMCVFIxKDAmBgNVBAoMH0VsZWt0cm9u +aWsgQmlsZ2kgR3V2ZW5saWdpIEEuUy4xJDAiBgNVBAMMG2UtR3V2ZW4gTW9iaWwgVGVzdFVURi04 +LVNPTjAeFw0xMjA1MDMxMTE2MTdaFw0xMzA1MDMxMTE2MTdaMGoxCzAJBgNVBAYTAlRSMREwDwYD +VQQKDAhGaXJlIExMVDEbMBkGA1UECwwScG9wQ29kZSAtIDEyMzQ1Njc4MRQwEgYDVQQFEws3NjU0 +MzQ1Njc2NTEVMBMGA1UEAwwMQnVyYWsgWW9uZGVtMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB +gQCpfSB7xcsHZR4E27yGHkzUJx1y2iknzX4gRM2acyPljRw/V5Lm7POrfWIX9UF2sxfYfRqxYmD0 ++nw72nx8R/5AFQK0BfjHxIc5W1YekMHF8PSORo9rJqcX+qn+NBYwqcJl4EdObTcOtMWC6ws6n0uA +oDvYYN0ujkua496sp+INiQIDAQABo4IDVzCCA1MwQgYIKwYBBQUHAQEENjA0MDIGCCsGAQUFBzAB +hiZodHRwOi8vdGVzdG9jc3AyLmUtZ3V2ZW4uY29tL29jc3AueHVkYTAfBgNVHSMEGDAWgBSzKEyJ +hCnl26jY22GZCrIUItr8MzCCAXIGA1UdIASCAWkwggFlMIGxBgZghhgDAAEwgaYwNgYIKwYBBQUH +AgEWKmh0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL05FU1VFLnBkZjBsBggrBgEFBQcC +AjBgGl5CdSBzZXJ0aWZpa2EsIDUwNzAgc2F5xLFsxLEgRWxla3Ryb25payDEsG16YSBLYW51bnVu +YSBnw7ZyZSBuaXRlbGlrbGkgZWxla3Ryb25payBzZXJ0aWZpa2FkxLFyMIGuBglghhgDAAEBAQMw +gaAwNwYIKwYBBQUHAgEWK2h0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL01LTkVTSS5w +ZGYwZQYIKwYBBQUHAgIwWRpXQnUgc2VydGlmaWthLCBNS05FU0kga2Fwc2FtxLFuZGEgeWF5xLFu +bGFubcSxxZ8gYmlyIG5pdGVsaWtsaSBlbGVrdHJvbmlrIHNlcnRpZmlrYWTEsXIuMA4GA1UdDwEB +/wQEAwIGwDCBgwYIKwYBBQUHAQMEdzB1MAgGBgQAjkYBATBpBgtghhgBPQABp04BAQxaQnUgc2Vy +dGlmaWthLCA1MDcwIHNheWlsaSBFbGVrdHJvbmlrIEltemEgS2FudW51bmEgZ8O2cmUgbml0ZWxp +a2xpIGVsZWt0cm9uaWsgc2VydGlmaWthZGlyMEUGA1UdCQQ+MDwwFAYIKwYBBQUHCQIxCAQGQW5r +YXJhMBIGCCsGAQUFBwkBMQYEBDE5NzkwEAYIKwYBBQUHCQQxBAQCVFIwGAYDVR0RBBEwD4ENZmly +ZUBmaXJlLmNvbTBgBgNVHR8EWTBXMFWgU6BRhk9odHRwOi8vdGVzdHNpbC5lLWd1dmVuLmNvbS9F +bGVrdHJvbmlrQmlsZ2lHdXZlbmxpZ2lBU01LTkVTSS1VVEYtOC9MYXRlc3RDUkwuY3JsMB0GA1Ud +DgQWBBSLG9aIb1k2emFLCpM93kXJkWhzuTANBgkqhkiG9w0BAQUFAAOCAQEACoGCn4bzDWLzs799 +rndpB971UD2wbwt8Hkw1MGZkkJVQeVF4IS8FacAyYk5vY8ONuTA/Wsh4x23v9WTCtO89HMTz81eU +BclqZ2Gc2UeMq7Y4FQWR8PNCMdCsxVVhpRRE6jQAyyR9YEBHQYVLfy34e3+9G/h/BR73VGHZJdZI +DDJYd+VWXmUD9kGk/mI35qYdzN3O28KI8sokqX0z2hvkpDKuP4jNXSCHcVkK23tX2x5m6m0LdqVn 
+vnCx2LfBn1wf1u7q30p/GgMVX+mR3QHs7feGewEjlkxuEyLVVD+uBwWCT6zcad17oaAyXV5RV28L +vH0WNg6pFUpwOP0l+nIOqqCBhAOBgQBAtTB5Qd18sTxEKhSzRiN2OycFPrqoqlZZTHBohe8bE2D4 +Xc1ejkFWUEvQivkqJxCD6C7I37xgDaq8DZnaczIBxbPkY0QMdeL4MiEqlw/tlrJGrWoC5Twb0t/m +JA5RSwQoMDYTj2WrwtM/nsP12T39or4JRZhlLSM43IaTwEBtQw== +""" + + def setUp(self): + self.asn1Spec = rfc4210.PKIMessage() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc5208.py b/third_party/python/pyasn1-modules/tests/test_rfc5208.py new file mode 100644 index 000000000000..94d1d0e0a537 --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc5208.py @@ -0,0 +1,74 @@ +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc5208, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class PrivateKeyInfoTestCase(unittest.TestCase): + pem_text = """\ +MIIBVgIBADANBgkqhkiG9w0BAQEFAASCAUAwggE8AgEAAkEAx8CO8E0MNgEKXXDf +I1xqBmQ+Gp3Srkqp45OApIu4lZ97n5VJ5HljU9wXcPIfx29Le3w8hCPEkugpLsdV +GWx+EQIDAQABAkEAiv3f+DGEh6ddsPszKQXK+LuTwy2CRajKYgJnBxf5zpG50XK4 +899An+x/pGYVmVED1f0JCbk3BUbv7HViLq0qgQIhAOYlQJaQ8KJBijDpjF62lcVr +QrqFPM4+ZrHsw0dVY2CZAiEA3jE5ngkVPfjFWEr7wS50EJhGiYlQeY4l+hADGIhd +XDkCIQDIHt5xzmif/nOGop5/gS7ssp8ch1zfTh2IW4NWlOZMCQIgLZmYo5BlpaRK +jAZHiKwJ8eXuhAeEVo4PyTREDmLeFjECIQCfyUPDclPo2O8ycPpozwoGwvKFrNZJ +VWRpRKqYnOAIXQ== +""" + + def setUp(self): + self.asn1Spec = rfc5208.PrivateKeyInfo() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +class EncryptedPrivateKeyInfoInfoTestCase(unittest.TestCase): + pem_text = """\ +MIIBgTAbBgkqhkiG9w0BBQMwDgQIdtFgDWnipT8CAggABIIBYN0hkm2xqkTCt8dJ +iZS8+HNiyHxy8g+rmWSXv/i+bTHFUReZA2GINtTRUkWpXqWcSHxNslgf7QdfgbVJ +xQiUM+lLhwOFh85iAHR3xmPU1wfN9NvY9DiLSpM0DMhF3OvAMZD75zIhA0GSKu7w +dUu7ey7H4fv7bez6RhEyLdKw9/Lf2KNStNOs4ow9CAtCoxeoMSniTt6CNhbvCkve +9vNHKiGavX1tS/YTog4wiiGzh2YxuW1RiQpTdhWiKyECgD8qQVg2tY5t3QRcXrzi +OkStpkiAPAbiwS/gyHpsqiLo0al63SCxRefugbn1ucZyc5Ya59e3xNFQXCNhYl+Z +Hl3hIl3cssdWZkJ455Z/bBE29ks1HtsL+bTfFi+kw/4yuMzoaB8C7rXScpGNI/8E +pvTU2+wtuoOFcttJregtR94ZHu5wgdYqRydmFNG8PnvZT1mRMmQgUe/vp88FMmsZ +dLsZjNQ= +""" + + def setUp(self): + self.asn1Spec = rfc5208.EncryptedPrivateKeyInfo() + + def testDerCodec(self): + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc5280.py 
b/third_party/python/pyasn1-modules/tests/test_rfc5280.py new file mode 100644 index 000000000000..49983ef4b5f7 --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc5280.py @@ -0,0 +1,82 @@ +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc5280, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class CertificateTestCase(unittest.TestCase): + pem_text = """\ +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +""" + + def setUp(self): + self.asn1Spec = rfc5280.Certificate() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +class CertificateListTestCase(unittest.TestCase): + pem_text = """\ +MIIBVjCBwAIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJBVTETMBEGA1UE +CBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRk +MRUwEwYDVQQDEwxzbm1wbGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25t +cGxhYnMuY29tFw0xMjA0MTExMzQwNTlaFw0xMjA1MTExMzQwNTlaoA4wDDAKBgNV +HRQEAwIBATANBgkqhkiG9w0BAQUFAAOBgQC1D/wwnrcY/uFBHGc6SyoYss2kn+nY +RTwzXmmldbNTCQ03x5vkWGGIaRJdN8QeCzbEi7gpgxgpxAx6Y5WkxkMQ1UPjNM5n +DGVDOtR0dskFrrbHuNpWqWrDaBN0/ryZiWKjr9JRbrpkHgVY29I1gLooQ6IHuKHY +vjnIhxTFoCb5vA== +""" + + def setUp(self): + self.asn1Spec = rfc5280.CertificateList() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tests/test_rfc5652.py b/third_party/python/pyasn1-modules/tests/test_rfc5652.py new file mode 100644 index 000000000000..5fa4296b9f88 --- /dev/null +++ b/third_party/python/pyasn1-modules/tests/test_rfc5652.py @@ -0,0 +1,87 @@ +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.codec.der import decoder as der_decoder +from pyasn1.codec.der import encoder as der_encoder + +from pyasn1_modules import rfc5652, rfc6402, pem + +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class ContentInfoTestCase(unittest.TestCase): + pem_text = """\ +MIIEJQYJKoZIhvcNAQcCoIIEFjCCBBICAQMxCzAJBgUrDgMCGgUAMIIDAgYIKwYBBQUHDAKgggL0 +BIIC8DCCAuwweDB2AgECBgorBgEEAYI3CgoBMWUwYwIBADADAgEBMVkwVwYJKwYBBAGCNxUUMUow +SAIBBQwZcGl0dWNoYTEuZW1lYS5ocHFjb3JwLm5ldAwMRU1FQVxwaXR1Y2hhDBpDTUNSZXFHZW5l +cmF0b3IudnNob3N0LmV4ZTCCAmqgggJmAgEBMIICXzCCAcgCAQAwADCBnzANBgkqhkiG9w0BAQEF +AAOBjQAwgYkCgYEA0jm7SSSm2wyEAzuNKtFZFJKo91SrJq9wQwEhEKHDavZwMQOm1rZ2PF8NWCEb +PqrhToQ7rtiGLSZa4dF4bzgmBqQ9aoSfEX4jISt31Vy+skHidXjHHpbsjT24NPhrZgANivL7CxD6 +Ft+s7qS1gL4HRm2twQkqSwOLrE/q2QeXl2UCAwEAAaCCAR0wGgYKKwYBBAGCNw0CAzEMFgo2LjIu +OTIwMC4yMD4GCSqGSIb3DQEJDjExMC8wHQYDVR0OBBYEFMW2skn88gxhONWZQA4sWGBDb68yMA4G +A1UdDwEB/wQEAwIHgDBXBgkrBgEEAYI3FRQxSjBIAgEFDBlwaXR1Y2hhMS5lbWVhLmhwcWNvcnAu +bmV0DAxFTUVBXHBpdHVjaGEMGkNNQ1JlcUdlbmVyYXRvci52c2hvc3QuZXhlMGYGCisGAQQBgjcN +AgIxWDBWAgECHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABv +AGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIDAQAwDQYJKoZIhvcNAQEFBQADgYEAJZlu +mxjtCxSOQi27jsVdd3y8NSIlzNv0b3LqmzvAly6L+CstXcnuG2MPQqPH9R7tbJonGUniBQO9sQ7C +KhYWj2gfhiEkSID82lV5chINVUFKoUlSiEhWr0tPGgvOaqdsKQcrHfzrsBbFkhDqrFSVy7Yivbnh +qYszKrOjJKiiCPMwADAAMYH5MIH2AgEDgBTFtrJJ/PIMYTjVmUAOLFhgQ2+vMjAJBgUrDgMCGgUA +oD4wFwYJKoZIhvcNAQkDMQoGCCsGAQUFBwwCMCMGCSqGSIb3DQEJBDEWBBTFTkK/OifaFjwqHiJu +xM7qXcg/VzANBgkqhkiG9w0BAQEFAASBgKfC6jOi1Wgy4xxDCQVK9+e5tktL8wE/j2cb9JSqq+aU +5UxEgXEw7q7BoYZCAzcxMRriGzakXr8aXHcgkRJ7XcFvLPUjpmGg9SOZ2sGW4zQdWAwImN/i8loc +xicQmJP+VoMHo/ZpjFY9fYCjNZUArgKsEwK/s+p9yrVVeB1Nf8Mn +""" + + def setUp(self): + self.asn1Spec = rfc5652.ContentInfo() + + def testDerCodec(self): + + substrate = pem.readBase64fromText(self.pem_text) + + layers = { + rfc5652.id_ct_contentInfo: rfc5652.ContentInfo(), + rfc5652.id_signedData: rfc5652.SignedData(), + rfc6402.id_cct_PKIData: rfc6402.PKIData() + } + + getNextLayer = { + rfc5652.id_ct_contentInfo: lambda x: x['contentType'], + rfc5652.id_signedData: lambda x: x['encapContentInfo']['eContentType'], + rfc6402.id_cct_PKIData: lambda x: None + } + + getNextSubstrate = { + rfc5652.id_ct_contentInfo: lambda x: x['content'], + rfc5652.id_signedData: lambda x: x['encapContentInfo']['eContent'], + rfc6402.id_cct_PKIData: lambda x: None + } + + + next_layer = rfc5652.id_ct_contentInfo + + while next_layer: + + asn1Object, rest = der_decoder.decode( + substrate, asn1Spec=layers[next_layer] + ) + + assert not rest + assert asn1Object.prettyPrint() + assert der_encoder.encode(asn1Object) == substrate + + substrate = getNextSubstrate[next_layer](asn1Object) + next_layer = getNextLayer[next_layer](asn1Object) + + +suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) + +if __name__ == '__main__': + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/third_party/python/pyasn1-modules/tools/cmcdump.py b/third_party/python/pyasn1-modules/tools/cmcdump.py new file mode 100755 index 000000000000..bce48b199059 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/cmcdump.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# +# Read CMC certificate request with wrappers on stdin, parse each into +# plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from 
pyasn1_modules import rfc5652, rfc6402, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat cmc_request.pem | %s""" % (sys.argv[0],)) + sys.exit(-1) + +reqCnt = 0 + +substrate = pem.readBase64FromFile(sys.stdin) + +_, rest = decoder.decode(substrate, asn1Spec=rfc5652.ContentInfo()) +assert not rest + +next_layer = rfc5652.id_ct_contentInfo +data = substrate +while next_layer: + if next_layer == rfc5652.id_ct_contentInfo: + layer, rest = decoder.decode(data, asn1Spec=rfc5652.ContentInfo()) + assert encoder.encode(layer) == data, 'wrapper recode fails' + assert not rest + + print(" * New layer (wrapper):") + print(layer.prettyPrint()) + + next_layer = layer['contentType'] + data = layer['content'] + + elif next_layer == rfc5652.id_signedData: + layer, rest = decoder.decode(data, asn1Spec=rfc5652.SignedData()) + assert encoder.encode(layer) == data, 'wrapper recode fails' + assert not rest + + print(" * New layer (wrapper):") + print(layer.prettyPrint()) + + next_layer = layer['encapContentInfo']['eContentType'] + data = layer['encapContentInfo']['eContent'] + + elif next_layer == rfc6402.id_cct_PKIData: + layer, rest = decoder.decode(data, asn1Spec=rfc6402.PKIData()) + assert encoder.encode(layer) == data, 'pkidata recode fails' + assert not rest + + print(" * New layer (pkidata):") + print(layer.prettyPrint()) + + next_layer = None + data = None diff --git a/third_party/python/pyasn1-modules/tools/cmpdump.py b/third_party/python/pyasn1-modules/tools/cmpdump.py new file mode 100755 index 000000000000..c89951ac9ff0 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/cmpdump.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM CMP message on stdin, parse into +# plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc4210, pem +from pyasn1 import debug +import sys + +if len(sys.argv) == 2 and sys.argv[1] == '-d': + debug.setLogger(debug.Debug('all')) +elif len(sys.argv) != 1: + print("""Usage: +$ cat cmp.pem | %s [-d]""" % sys.argv[0]) + sys.exit(-1) + +pkiMessage = rfc4210.PKIMessage() + +substrate = pem.readBase64FromFile(sys.stdin) +if not substrate: + sys.exit(0) + +pkiMsg, rest = decoder.decode(substrate, asn1Spec=pkiMessage) + +print(pkiMsg.prettyPrint()) + +assert encoder.encode(pkiMsg) == substrate, 'CMP message recode fails' diff --git a/third_party/python/pyasn1-modules/tools/crldump.py b/third_party/python/pyasn1-modules/tools/crldump.py new file mode 100755 index 000000000000..b871ddd96236 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/crldump.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read X.509 CRL on stdin, print them pretty and encode back into +# original wire format. +# CRL can be generated with "openssl openssl ca -gencrl ..." commands. 
+# +from pyasn1_modules import rfc2459, pem +from pyasn1.codec.der import encoder, decoder +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat crl.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +asn1Spec = rfc2459.CertificateList() + +cnt = 0 + +while True: + idx, substrate = pem.readPemBlocksFromFile(sys.stdin, ('-----BEGIN X509 CRL-----', '-----END X509 CRL-----')) + if not substrate: + break + + key, rest = decoder.decode(substrate, asn1Spec=asn1Spec) + + if rest: + substrate = substrate[:-len(rest)] + + print(key.prettyPrint()) + + assert encoder.encode(key) == substrate, 'pkcs8 recode fails' + + cnt += 1 + +print('*** %s CRL(s) re/serialized' % cnt) diff --git a/third_party/python/pyasn1-modules/tools/crmfdump.py b/third_party/python/pyasn1-modules/tools/crmfdump.py new file mode 100755 index 000000000000..efb0ffc0e443 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/crmfdump.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM X.509 CRMF request on stdin, parse into +# plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc2511, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat crmf.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +certReq = rfc2511.CertReqMessages() + +substrate = pem.readBase64FromFile(sys.stdin) +if not substrate: + sys.exit(0) + +cr, rest = decoder.decode(substrate, asn1Spec=certReq) + +print(cr.prettyPrint()) + +assert encoder.encode(cr) == substrate, 'crmf recode fails' diff --git a/third_party/python/pyasn1-modules/tools/ocspclient.py b/third_party/python/pyasn1-modules/tools/ocspclient.py new file mode 100755 index 000000000000..07ff5ada6841 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/ocspclient.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import hashlib +import sys + +try: + import urllib2 +except ImportError: + import urllib.request as urllib2 + +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import univ + +from pyasn1_modules import rfc2560, rfc2459, pem + +sha1oid = univ.ObjectIdentifier((1, 3, 14, 3, 2, 26)) + + +# noinspection PyClassHasNoInit +class ValueOnlyBitStringEncoder(encoder.encoder.BitStringEncoder): + # These methods just do not encode tag and length fields of TLV + def encodeTag(self, *args): + return '' + + def encodeLength(self, *args): + return '' + + def encodeValue(*args): + substrate, isConstructed = encoder.encoder.BitStringEncoder.encodeValue(*args) + # OCSP-specific hack follows: cut off the "unused bit count" + # encoded bit-string value. 
+ return substrate[1:], isConstructed + + def __call__(self, bitStringValue): + return self.encode(None, bitStringValue, defMode=True, maxChunkSize=0) + + +valueOnlyBitStringEncoder = ValueOnlyBitStringEncoder() + + +# noinspection PyShadowingNames +def mkOcspRequest(issuerCert, userCert): + issuerTbsCertificate = issuerCert.getComponentByName('tbsCertificate') + issuerSubject = issuerTbsCertificate.getComponentByName('subject') + + userTbsCertificate = userCert.getComponentByName('tbsCertificate') + userIssuer = userTbsCertificate.getComponentByName('issuer') + + assert issuerSubject == userIssuer, '%s\n%s' % ( + issuerSubject.prettyPrint(), userIssuer.prettyPrint() + ) + + userIssuerHash = hashlib.sha1( + encoder.encode(userIssuer) + ).digest() + + issuerSubjectPublicKey = issuerTbsCertificate.getComponentByName('subjectPublicKeyInfo').getComponentByName( + 'subjectPublicKey') + + issuerKeyHash = hashlib.sha1( + valueOnlyBitStringEncoder(issuerSubjectPublicKey) + ).digest() + + userSerialNumber = userTbsCertificate.getComponentByName('serialNumber') + + # Build request object + + request = rfc2560.Request() + + reqCert = request.setComponentByName('reqCert').getComponentByName('reqCert') + + hashAlgorithm = reqCert.setComponentByName('hashAlgorithm').getComponentByName('hashAlgorithm') + hashAlgorithm.setComponentByName('algorithm', sha1oid) + + reqCert.setComponentByName('issuerNameHash', userIssuerHash) + reqCert.setComponentByName('issuerKeyHash', issuerKeyHash) + reqCert.setComponentByName('serialNumber', userSerialNumber) + + ocspRequest = rfc2560.OCSPRequest() + + tbsRequest = ocspRequest.setComponentByName('tbsRequest').getComponentByName('tbsRequest') + tbsRequest.setComponentByName('version', 'v1') + + requestList = tbsRequest.setComponentByName('requestList').getComponentByName('requestList') + requestList.setComponentByPosition(0, request) + + return ocspRequest + + +def parseOcspResponse(ocspResponse): + responseStatus = ocspResponse.getComponentByName('responseStatus') + assert responseStatus == rfc2560.OCSPResponseStatus('successful'), responseStatus.prettyPrint() + responseBytes = ocspResponse.getComponentByName('responseBytes') + responseType = responseBytes.getComponentByName('responseType') + assert responseType == rfc2560.id_pkix_ocsp_basic, responseType.prettyPrint() + + response = responseBytes.getComponentByName('response') + + basicOCSPResponse, _ = decoder.decode( + response, asn1Spec=rfc2560.BasicOCSPResponse() + ) + + tbsResponseData = basicOCSPResponse.getComponentByName('tbsResponseData') + + response0 = tbsResponseData.getComponentByName('responses').getComponentByPosition(0) + + return ( + tbsResponseData.getComponentByName('producedAt'), + response0.getComponentByName('certID'), + response0.getComponentByName('certStatus').getName(), + response0.getComponentByName('thisUpdate') + ) + + +if len(sys.argv) != 2: + print("""Usage: +$ cat CACertificate.pem userCertificate.pem | %s """ % sys.argv[0]) + sys.exit(-1) +else: + ocspUrl = sys.argv[1] + +# Parse CA and user certificates + +issuerCert, _ = decoder.decode( + pem.readPemBlocksFromFile( + sys.stdin, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----') + )[1], + asn1Spec=rfc2459.Certificate() +) +# noinspection PyRedeclaration +userCert, _ = decoder.decode( + pem.readPemBlocksFromFile( + sys.stdin, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----') + )[1], + asn1Spec=rfc2459.Certificate() +) + +# Build OCSP request + +ocspReq = mkOcspRequest(issuerCert, userCert) + +# Use HTTP POST to get 
response (see Appendix A of RFC 2560) +# In case you need proxies, set the http_proxy env variable + +httpReq = urllib2.Request( + ocspUrl, + encoder.encode(ocspReq), + {'Content-Type': 'application/ocsp-request'} +) +httpRsp = urllib2.urlopen(httpReq).read() + +# Process OCSP response + +# noinspection PyRedeclaration +ocspRsp, _ = decoder.decode(httpRsp, asn1Spec=rfc2560.OCSPResponse()) + +producedAt, certId, certStatus, thisUpdate = parseOcspResponse(ocspRsp) + +print('Certificate ID %s is %s at %s till %s\n' % (certId.getComponentByName('serialNumber'), + certStatus, producedAt, thisUpdate)) diff --git a/third_party/python/pyasn1-modules/tools/ocspreqdump.py b/third_party/python/pyasn1-modules/tools/ocspreqdump.py new file mode 100755 index 000000000000..40c088a130e8 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/ocspreqdump.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM X.509 CRMF request on stdin, parse into +# plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc2560, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat ocsp-request.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +ocspReq = rfc2560.OCSPRequest() + +substrate = pem.readBase64FromFile(sys.stdin) +if not substrate: + sys.exit(0) + +cr, rest = decoder.decode(substrate, asn1Spec=ocspReq) + +print(cr.prettyPrint()) + +assert encoder.encode(cr) == substrate, 'OCSP request recode fails' diff --git a/third_party/python/pyasn1-modules/tools/ocsprspdump.py b/third_party/python/pyasn1-modules/tools/ocsprspdump.py new file mode 100755 index 000000000000..ca52f64bd7d3 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/ocsprspdump.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM OCSP response on stdin, parse into +# plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc2560, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat ocsp-response.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +ocspReq = rfc2560.OCSPResponse() + +substrate = pem.readBase64FromFile(sys.stdin) +if not substrate: + sys.exit(0) + +cr, rest = decoder.decode(substrate, asn1Spec=ocspReq) + +print(cr.prettyPrint()) + +assert encoder.encode(cr) == substrate, 'OCSP request recode fails' diff --git a/third_party/python/pyasn1-modules/tools/pkcs10dump.py b/third_party/python/pyasn1-modules/tools/pkcs10dump.py new file mode 100755 index 000000000000..56417ae483d0 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/pkcs10dump.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM X.509 certificate requests (PKCS#10 format) on stdin, +# parse each into plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc2314, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat certificateRequest.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +certType = rfc2314.CertificationRequest() + +certCnt = 0 + +while True: + idx, substrate = pem.readPemBlocksFromFile( + sys.stdin, ('-----BEGIN CERTIFICATE REQUEST-----', + '-----END CERTIFICATE REQUEST-----') + ) + if not substrate: + break + + cert, rest = decoder.decode(substrate, asn1Spec=certType) + + if rest: + substrate = substrate[:-len(rest)] + + print(cert.prettyPrint()) + + assert encoder.encode(cert) == substrate, 'cert recode fails' + + certCnt += 1 + +print('*** %s PEM certificate request(s) de/serialized' % certCnt) diff --git a/third_party/python/pyasn1-modules/tools/pkcs1dump.py b/third_party/python/pyasn1-modules/tools/pkcs1dump.py new file mode 100755 index 000000000000..f205d779cc26 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/pkcs1dump.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read unencrypted PKCS#1/PKIX-compliant, PEM&DER encoded private keys on +# stdin, print them pretty and encode back into original wire format. +# Private keys can be generated with "openssl genrsa|gendsa" commands. +# +from pyasn1_modules import rfc2459, rfc2437, pem +from pyasn1.codec.der import encoder, decoder +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat rsakey.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +cnt = 0 + +while True: + idx, substrate = pem.readPemBlocksFromFile( + sys.stdin, + ('-----BEGIN RSA PRIVATE KEY-----', '-----END RSA PRIVATE KEY-----'), + ('-----BEGIN DSA PRIVATE KEY-----', '-----END DSA PRIVATE KEY-----') + ) + if not substrate: + break + + if idx == 0: + asn1Spec = rfc2437.RSAPrivateKey() + elif idx == 1: + asn1Spec = rfc2459.DSAPrivateKey() + else: + break + + key, rest = decoder.decode(substrate, asn1Spec=asn1Spec) + + if rest: + substrate = substrate[:-len(rest)] + + print(key.prettyPrint()) + + assert encoder.encode(key) == substrate, 'pkcs8 recode fails' + + cnt += 1 + +print('*** %s key(s) re/serialized' % cnt) diff --git a/third_party/python/pyasn1-modules/tools/pkcs7dump.py b/third_party/python/pyasn1-modules/tools/pkcs7dump.py new file mode 100755 index 000000000000..72fe70d7b3f4 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/pkcs7dump.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM PKCS#7 on stdin, parse it into plain text, +# then build substrate from it +# +from pyasn1_modules import rfc2315, pem +from pyasn1.codec.der import encoder, decoder +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat pkcs7Certificate.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +idx, substrate = pem.readPemBlocksFromFile( + sys.stdin, ('-----BEGIN PKCS7-----', '-----END PKCS7-----') +) + +assert substrate, 'bad PKCS7 data on input' + +contentInfo, rest = decoder.decode(substrate, asn1Spec=rfc2315.ContentInfo()) + +if rest: + substrate = substrate[:-len(rest)] + +print(contentInfo.prettyPrint()) + +assert encoder.encode(contentInfo) == substrate, 're-encode fails' + +contentType = contentInfo.getComponentByName('contentType') + +contentInfoMap = { + (1, 2, 840, 113549, 1, 7, 1): rfc2315.Data(), + (1, 2, 840, 113549, 1, 7, 2): rfc2315.SignedData(), + (1, 2, 840, 113549, 1, 7, 3): rfc2315.EnvelopedData(), + (1, 2, 840, 113549, 1, 7, 4): rfc2315.SignedAndEnvelopedData(), + (1, 2, 840, 113549, 1, 7, 5): rfc2315.DigestedData(), + (1, 2, 840, 113549, 1, 7, 6): rfc2315.EncryptedData() +} + +content, _ = decoder.decode( + contentInfo.getComponentByName('content'), + asn1Spec=contentInfoMap[contentType] +) + +print(content.prettyPrint()) diff --git a/third_party/python/pyasn1-modules/tools/pkcs8dump.py b/third_party/python/pyasn1-modules/tools/pkcs8dump.py new file mode 100755 index 000000000000..2bb83884a8c2 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/pkcs8dump.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read bunch of ASN.1/PEM plain/encrypted private keys in PKCS#8 +# format on stdin, parse each into plain text, then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc5208, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat pkcs8key.pem | %s""" % sys.argv[0]) + sys.exit(-1) + +cnt = 0 + +while True: + idx, substrate = pem.readPemBlocksFromFile( + sys.stdin, + ('-----BEGIN PRIVATE KEY-----', '-----END PRIVATE KEY-----'), + ('-----BEGIN ENCRYPTED PRIVATE KEY-----', '-----END ENCRYPTED PRIVATE KEY-----') + ) + if not substrate: + break + + if idx == 0: + asn1Spec = rfc5208.PrivateKeyInfo() + elif idx == 1: + asn1Spec = rfc5208.EncryptedPrivateKeyInfo() + else: + break + + key, rest = decoder.decode(substrate, asn1Spec=asn1Spec) + + if rest: + substrate = substrate[:-len(rest)] + + print(key.prettyPrint()) + + assert encoder.encode(key) == substrate, 'pkcs8 recode fails' + + cnt += 1 + +print('*** %s PKCS#8 key(s) de/serialized' % cnt) diff --git a/third_party/python/pyasn1-modules/tools/snmpget.py b/third_party/python/pyasn1-modules/tools/snmpget.py new file mode 100755 index 000000000000..cd9fec21a2cb --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/snmpget.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Generate SNMPGET request, parse response +# +from pyasn1.codec.ber import encoder, decoder +from pyasn1_modules import rfc1157 +import sys +import socket + +if len(sys.argv) != 4: + print("""Usage: +$ %s """ % sys.argv[0]) + sys.exit(-1) + +msg = rfc1157.Message() +msg.setComponentByPosition(0) +msg.setComponentByPosition(1, sys.argv[1]) +# pdu +pdus = msg.setComponentByPosition(2).getComponentByPosition(2) +pdu = pdus.setComponentByPosition(0).getComponentByPosition(0) +pdu.setComponentByPosition(0, 123) +pdu.setComponentByPosition(1, 0) +pdu.setComponentByPosition(2, 0) +vbl = pdu.setComponentByPosition(3).getComponentByPosition(3) +vb = vbl.setComponentByPosition(0).getComponentByPosition(0) +vb.setComponentByPosition(0, sys.argv[3]) +v = vb.setComponentByPosition(1).getComponentByPosition(1).setComponentByPosition(0).getComponentByPosition(0).setComponentByPosition(3).getComponentByPosition(3) + +print('sending: %s' % msg.prettyPrint()) + +sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) +sock.sendto(encoder.encode(msg), (sys.argv[2], 161)) + +substrate, _ = sock.recvfrom(2048) + +# noinspection PyRedeclaration +rMsg, _ = decoder.decode(substrate, asn1Spec=msg) + +print('received: %s' % rMsg.prettyPrint()) diff --git a/third_party/python/pyasn1-modules/tools/x509dump-rfc5280.py b/third_party/python/pyasn1-modules/tools/x509dump-rfc5280.py new file mode 100755 index 000000000000..482df7e5bfb9 --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/x509dump-rfc5280.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# coding: utf-8 +# +# This file is part of pyasn1-modules software. +# +# Created by Stanisław Pitucha with asn1ate tool. +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM X.509 certificates on stdin, parse each into plain text, +# then build substrate from it (using RFC5280) +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc5280, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat CACertificate.pem | %s +$ cat userCertificate.pem | %s""" % (sys.argv[0], sys.argv[0])) + sys.exit(-1) + +certType = rfc5280.Certificate() + +certCnt = 0 + +while 1: + idx, substrate = pem.readPemBlocksFromFile( + sys.stdin, ('-----BEGIN CERTIFICATE-----', + '-----END CERTIFICATE-----') + ) + if not substrate: + break + + cert, rest = decoder.decode(substrate, asn1Spec=certType) + + if rest: + substrate = substrate[:-len(rest)] + + print(cert.prettyPrint()) + + assert encoder.encode(cert) == substrate, 'cert recode fails' + + certCnt += 1 + +print('*** %s PEM cert(s) de/serialized' % certCnt) diff --git a/third_party/python/pyasn1-modules/tools/x509dump.py b/third_party/python/pyasn1-modules/tools/x509dump.py new file mode 100755 index 000000000000..2c51c6a5162f --- /dev/null +++ b/third_party/python/pyasn1-modules/tools/x509dump.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# +# This file is part of pyasn1-modules software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Read ASN.1/PEM X.509 certificates on stdin, parse each into plain text, +# then build substrate from it +# +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc2459, pem +import sys + +if len(sys.argv) != 1: + print("""Usage: +$ cat CACertificate.pem | %s +$ cat userCertificate.pem | %s""" % (sys.argv[0], sys.argv[0])) + sys.exit(-1) + +certType = rfc2459.Certificate() + +certCnt = 0 + +while True: + idx, substrate = pem.readPemBlocksFromFile( + sys.stdin, ('-----BEGIN CERTIFICATE-----', + '-----END CERTIFICATE-----') + ) + if not substrate: + break + + cert, rest = decoder.decode(substrate, asn1Spec=certType) + + if rest: + substrate = substrate[:-len(rest)] + + print(cert.prettyPrint()) + + assert encoder.encode(cert) == substrate, 'cert recode fails' + + certCnt += 1 + +print('*** %s PEM cert(s) de/serialized' % certCnt) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/RECORD b/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/RECORD deleted file mode 100644 index 56851eb2b877..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/RECORD +++ /dev/null @@ -1,113 +0,0 @@ -pyasn1_modules/__init__.py,sha256=dVxDhxuk2UnZm-vUYJxsjuoO1cQuyjimpP5F0Rk3B8M,65 -pyasn1_modules/pem.py,sha256=j3qNWa4Bbgq6NKs343cUGYrhyUzHUDAU-hC23aeCIog,2058 -pyasn1_modules/rfc1155.py,sha256=9xUfGI35hFQ3OF4UxGd3V_B0DMflGfnLqjjmlEngDqs,2683 -pyasn1_modules/rfc1157.py,sha256=GnLq_jcPLdUHREomh7HmsT9ZyNnPDw4NLEEFwppCyJQ,3554 -pyasn1_modules/rfc1901.py,sha256=Uq8zJ4HdCClnV0du14_hF3ggNdHAM-heaxuz23cwoOQ,646 -pyasn1_modules/rfc1902.py,sha256=JUYq7hBfik2w-_Ju17hpe_j00QKAZEguwe20BK8fC9I,3705 -pyasn1_modules/rfc1905.py,sha256=qTqjTF4L4Wz4svQADIjliqyqPs0mnD8GDqhgngJWdN0,4831 -pyasn1_modules/rfc2251.py,sha256=hBEhoVDvkNxSQY6zeX6WxpKL1i2vqrIPZ5Jmvl5KC7M,26931 -pyasn1_modules/rfc2314.py,sha256=r9tTFaflcmVtQVTDoDo_-OYk5W98gD4NglZX78j3a6M,1313 -pyasn1_modules/rfc2315.py,sha256=ZfyNEbwHz-q0y3twhBZlogIeWNrP_lcBUAzIfcXYGUo,9666 -pyasn1_modules/rfc2437.py,sha256=9l6YNwD0BUrdhmg5NAs_K3PqrwfJVDR-5N9_EjeRRk4,2623 -pyasn1_modules/rfc2459.py,sha256=TYZuSTbv868F5dXKi83H-ShqCwy7SQIyUAMBWVDgc2Q,50002 -pyasn1_modules/rfc2511.py,sha256=S6Bggb2UR45IRdSNVdWFVfedsa1Om2VoZILlY-oL6QU,10350 -pyasn1_modules/rfc2560.py,sha256=QfVWkw4GJXKVsjDUPh9ORF2kpi5XQTLlZdIB677qvv8,8406 -pyasn1_modules/rfc2631.py,sha256=Het4nHPVFj6oElpEANYkKQuincUa0ms5SOt94Ph8jhs,1219 -pyasn1_modules/rfc2634.py,sha256=7sTu3YysbHImknLk7CbdQIjJjt6cC849-XqkuEDgFPk,9425 -pyasn1_modules/rfc2985.py,sha256=8GL8jkWGpN1t7sVaEtyhVgfCM80XhlYOUEi9jhcAX0E,14359 -pyasn1_modules/rfc2986.py,sha256=sjlXnV2fnyaYqZjgepsneTqXiwk2N0mrdExEuEHp92I,1896 -pyasn1_modules/rfc3114.py,sha256=02eDCK2blUNybTaGX85vxGfCTnzHXXa9BP9IaVVocK8,1961 -pyasn1_modules/rfc3161.py,sha256=9kz_TvQ5_OpBPuHQDAh2WyqKeOThgxPq8E5iBB-sNp8,4260 -pyasn1_modules/rfc3274.py,sha256=ZULbMN3wksvv_fWvT_C1vskxuh_IzRCAD9QD1hdk-lo,1670 -pyasn1_modules/rfc3279.py,sha256=uRaWfvIw4WXBoJN9gcAhsW8MTDymGoa-FrrC2k033TI,6807 -pyasn1_modules/rfc3280.py,sha256=nra0JN8HEPg3XorP-ry8H1Wb7xiG81VBGSFmKFCEldU,46620 -pyasn1_modules/rfc3281.py,sha256=s0MV7DaVXhap8bIeKqCbjmrwrMytxBTFPFl2TD21g6Y,9866 -pyasn1_modules/rfc3412.py,sha256=_PQEwCmLcxlNlflAv-xQbfwTr_Fks7FvmBfCGQIF3ME,1956 -pyasn1_modules/rfc3414.py,sha256=lbn5t4ycmhbg6smNvpZwcX3L1VaU0ns3VYplyHCyVc0,1167 -pyasn1_modules/rfc3447.py,sha256=c5KidhoTIibl1nvqvEIbBSBFmbQcAns75GDpFwMHUhM,1605 
-pyasn1_modules/rfc3560.py,sha256=3Ud7sY7OAV_4KGNn_hg5xZblEkxE_ILH1kP2TI-KbZw,1818 -pyasn1_modules/rfc3565.py,sha256=nRephcXY7ioG5I4iaT6mSQYGwaouRQXoMnp2kFQQOE0,1438 -pyasn1_modules/rfc3709.py,sha256=KAaG7SKTT9Ef-Kza5Zn_qXkZppul8Wt8MPSkzS4qs5o,6469 -pyasn1_modules/rfc3770.py,sha256=ue0Qaiys8J86M-8EtLNrcfuXm87Mr2GQ4f30lSs0vXE,1743 -pyasn1_modules/rfc3779.py,sha256=x8HYKGCaGO3BohCREHQUEa1oYGArWIC2J0PftxiPrjI,3260 -pyasn1_modules/rfc3852.py,sha256=Ekx1BOSu7Bsg1IFO96uDZ4iGCGzu-r5n0KPwvxT18BY,20101 -pyasn1_modules/rfc4043.py,sha256=OWPgVzfK3Hs5sNQJSqUBkInhgikv-x15-xLSg30xwNE,1067 -pyasn1_modules/rfc4055.py,sha256=f2rlyaBeNhl287b_qLLsNpjgwxYRVzBgbOH28UnJZwQ,10392 -pyasn1_modules/rfc4073.py,sha256=bHVssQE3yXwetes1TPWAT30UhOEinHj8vEBaYjWC24g,1636 -pyasn1_modules/rfc4108.py,sha256=-I63Z0crn_Elvr85nSa9BqAlRx7cIJfEb9ItPDkq8JY,10598 -pyasn1_modules/rfc4210.py,sha256=PmJyGAnQGbG3H0Jzo4G4MfIg4kk7Ebd4CTKA0jYGynw,28469 -pyasn1_modules/rfc4211.py,sha256=W2YVMJWUEsRNGvdEmf4Ktoav5mwHfDhJyaPsCso9hFA,12110 -pyasn1_modules/rfc4334.py,sha256=Q-fcYksrunAo1t07HE2jm5WlQgFAf5o39utpel0ZjcI,1586 -pyasn1_modules/rfc4985.py,sha256=oWCBG3tknFLUJOeG4aKF7JrkA4qMjPyJFGTnf7xmPd8,961 -pyasn1_modules/rfc5035.py,sha256=xgw9ztAM_bJKlIUCzni2zcE_z3ErEuXpWRPJpXI1KEw,4523 -pyasn1_modules/rfc5083.py,sha256=ENXIEL0CYrTqvf_iwpvAkBBJpi2pOFNBDFEYc37yqF8,1888 -pyasn1_modules/rfc5084.py,sha256=i9sFdUklbdTQodTya4BNFnpeFxGIB2uS1aNkfFdZpu4,2855 -pyasn1_modules/rfc5208.py,sha256=O2ZDYy-lqXeQcK_9gryuvm71TUnzIF7yL8j_LrIBEQc,1432 -pyasn1_modules/rfc5280.py,sha256=GweilWgd70v1Z0YovOSU3Bnu7vvu4rMscgE6WhksBkg,51236 -pyasn1_modules/rfc5480.py,sha256=GzBTgKQ68V-L-Qy0SBrCQMgqR5mGF7U73uXlBzfV2Jk,4834 -pyasn1_modules/rfc5649.py,sha256=3A--LQL7iw8DGXSDyiSUeh6wwFPKQQGyVY94mNzY0Ek,830 -pyasn1_modules/rfc5652.py,sha256=jmL6fOHqTAQvceW9mtkAZpOaxkemRjWDrdpL4pglQkk,21451 -pyasn1_modules/rfc5751.py,sha256=M8kTLARhdqh3UqmlZv_FWJfuJb-ph7P6MVGxSP7Q4wQ,3198 -pyasn1_modules/rfc5755.py,sha256=RZ28NeCnEAGr2pLRSNFw0BRb_b_eulmxag-lRTmUeTo,12081 -pyasn1_modules/rfc5913.py,sha256=OayMmpi29ZlQI1EszIxXaU8Mhwi41BrH5esoyS80efQ,1161 -pyasn1_modules/rfc5914.py,sha256=nXOb4SvESbEFYI8h0nEYkRArNZ9w5Zqxva_4uAdMXNY,3714 -pyasn1_modules/rfc5915.py,sha256=VqMRd_Ksm0LFvE5XX4_MO6BdFG7Ch7NdQcwT_DMWAK4,1056 -pyasn1_modules/rfc5916.py,sha256=gHrFO9lX21h6Wa3JnEqyjuqXQlcTE0loUIu913Sit0E,800 -pyasn1_modules/rfc5917.py,sha256=nM08rGm9D3O8uqSbmshvp7_fHl2dYaTdhUGVJQHe0xc,1511 -pyasn1_modules/rfc5924.py,sha256=_8TqEJ9Q7cFSd2u3Za6rzlNPqGLl7IA4oHtYVpoJhdA,425 -pyasn1_modules/rfc5934.py,sha256=77z96SeP4iM2R6Rl5-Vx7OaENA8ZQvzrfhDVZRy9lqk,23798 -pyasn1_modules/rfc5940.py,sha256=66rMmgyKBhay-RZsWaKz7PUGwp0bqEAVULPb4Edk1vk,1613 -pyasn1_modules/rfc5958.py,sha256=NZPx-7FvjzgErz2lTURiRq8m3XCZ7D9QbGDhtIF-zCE,2650 -pyasn1_modules/rfc5990.py,sha256=-b0St64ba3LVRGSeNmbGoMIbkU8c8FDpo4zFWF0PCFM,5505 -pyasn1_modules/rfc6010.py,sha256=F43AYVFUwu-2_xjJE2Wmw1Wdt0K7l3vg0_fCa_QHqBU,2347 -pyasn1_modules/rfc6019.py,sha256=vzj5tfG4694-ucpErpAtE1DVOE4-v0dkN894Zr9xm4o,1086 -pyasn1_modules/rfc6031.py,sha256=X2cjNyVnrX3G2zG7kD4Rq__kF6-ftmmnqHlCQJDCuMU,12137 -pyasn1_modules/rfc6032.py,sha256=uNAu5zLHg0b583xxzFNUZxCnJaCzMw1iobzREuejMoM,1950 -pyasn1_modules/rfc6120.py,sha256=JehGZD8Y0Bdhr_ojpMSjHgnRHEdUXauZxqLxRwns6Cc,818 -pyasn1_modules/rfc6170.py,sha256=sL2yPZzO--MI4ToeAwlFEP-x6I0-etuJxT2mgAPjEO4,409 -pyasn1_modules/rfc6187.py,sha256=jOMiIhw4HAUn7hj37gKImNU_hK8TamAfd0V0Jrwh_YU,489 -pyasn1_modules/rfc6210.py,sha256=wLifK_EShv1a4TOhGJ-k9zA1kVVYVDNjS-Rh0ohmCh0,1052 
-pyasn1_modules/rfc6211.py,sha256=XotTBQVseK7y0nJB4Fx-npdhRHeH53IM84kGupWIprk,2257 -pyasn1_modules/rfc6402-1.py,sha256=F2t7dYFdqYQ_PiG9JoUlNMcRvIghrbJPoNgdjcKGSuc,17049 -pyasn1_modules/rfc6402.py,sha256=0ciItKf7voeSCTZl1kKYd6gyQ68IZzwMe1-fj16etKs,17148 -pyasn1_modules/rfc6482.py,sha256=10_Xyb2TaPFx72IUCZtu81aH5rmYihhdL0P-PVby1ys,2085 -pyasn1_modules/rfc6486.py,sha256=a3_5OJvkz2G7xWOC0dqbNqJQDsHQAOU62AWin107c4k,1916 -pyasn1_modules/rfc6487.py,sha256=gTUVkFYJyUcr1E4uoeN2cXPNaXyjYbixupbBKFQA4jQ,472 -pyasn1_modules/rfc6664.py,sha256=nq8F5wDeO49FoBGVQDx8ivvg_GsubdWa1bpZM_40Tms,4270 -pyasn1_modules/rfc6955.py,sha256=FBVb8LpHKMZjR3wOJtm-BPbi5EMiRoGuUWh41r1soCU,2814 -pyasn1_modules/rfc6960.py,sha256=BhEDCLLrae4RaCpMuKJc0kw1bGs56V0_F-NxiO9ctuw,7913 -pyasn1_modules/rfc7030.py,sha256=t-s2BDyX3Zk2sy_jMQl-P2I2NXFOn7huu0wFcM-2sqs,1441 -pyasn1_modules/rfc7191.py,sha256=uMsBzJ9167wxsiPYDQUnZQFVFNfgUxnCwRNeKnXxNGM,7062 -pyasn1_modules/rfc7229.py,sha256=GSiUz4QkYODfnIvLRXKiabyno9Gmd6CX0zWR7HoIpCk,743 -pyasn1_modules/rfc7292.py,sha256=wORjDGD_aqHoujB2wu6nNrEjYTw3VO_xDp-Qx0VWLbc,8478 -pyasn1_modules/rfc7296.py,sha256=eAZpZ2dgUhxbJrLLGtDff4UspauG7Tr5dj8WELYHnUM,885 -pyasn1_modules/rfc7508.py,sha256=ZmJFbQO934Fs8wxcpO0gg5fU0d8yEFlkkFD3KMUQbAE,2182 -pyasn1_modules/rfc7585.py,sha256=T0-sdzPJoop1jbB2RJ-wzUnf6t6CeD2eMMXpcz55JEg,1076 -pyasn1_modules/rfc7633.py,sha256=8P_fBWkoGk3rsk7SEAm6QZcPjoRGTRGQuasWMLOrLKY,841 -pyasn1_modules/rfc7773.py,sha256=6UGPWyVYuicKe6snZCnD1wuAu1MOVgzPoSALL2uvTrI,1315 -pyasn1_modules/rfc7894-1.py,sha256=gTmuu4C3BxGdhbZDuWPix84Cm2z0HqaounDjm2bBpXo,2792 -pyasn1_modules/rfc7894.py,sha256=HLaSBoOUB-_cSE5935TXAnuFBVpZBv6jBnLOPp_-LNk,2769 -pyasn1_modules/rfc7906.py,sha256=mDf1pWwVNlCcEQfswUhtQDStAnwS-5xbZtjMlfnWLdI,18921 -pyasn1_modules/rfc7914.py,sha256=JxWGnXV-V13xzOn7c7-_3vxDNpkPtdZIYU4KF2kFXR4,1493 -pyasn1_modules/rfc8017.py,sha256=pwPRSchvMtXuatcCLULHuvSL8kAPEqkC4aIJjd5vEAo,4178 -pyasn1_modules/rfc8018.py,sha256=8_49xA3vEOdlGUhasw2xTUv4TpHBvjRuoonMT_k1TTk,6166 -pyasn1_modules/rfc8103.py,sha256=pNYAFfKCNrg9ZmRKsNNwr2ooptEABF3gMaPbqCroRnQ,1017 -pyasn1_modules/rfc8209.py,sha256=9EQ077rjD9uoTZWIOGmeOaHLDDq0IRXh3Rt0eYB-Ysc,393 -pyasn1_modules/rfc8226.py,sha256=mudlVgrsJ6XeHnFmxBNW_NgcYcFsHUvK04_MTr3UkRM,4291 -pyasn1_modules/rfc8358.py,sha256=aiHaXQAaaP-q5c90x_uZHSpQRTB-yekwhe6V9-EtrFg,1136 -pyasn1_modules/rfc8360.py,sha256=T4sY6o2VLVPnZ9s4yJ8PzfVA8Y60ne-1KcVNtw5yt-s,1075 -pyasn1_modules/rfc8398.py,sha256=i3lwgf__9oJzOaaHJKWmDAx3d_deKNCCuvIDWqQWiJ4,1192 -pyasn1_modules/rfc8410.py,sha256=nteKyTKcIwVlgh1qUl-8kE63kKG-KgWtLrfF92TWyyQ,971 -pyasn1_modules/rfc8418.py,sha256=eTCPTOm6t-RyHd6PlowLogDzUO72lRddESYLiSiOpC0,1109 -pyasn1_modules/rfc8419.py,sha256=qcvBlXxqvsCvG_F6AKKjqBderqbWwBy8zjZOjAPdYU4,1704 -pyasn1_modules/rfc8479.py,sha256=rDKzrp-MmEF0t3E7lqKXhgwcggvx8NoWVbtJHGLxDYM,1142 -pyasn1_modules/rfc8494.py,sha256=GMht1RdAbjHLtSqHdJ2cLO8HXRz6SLIPE254T4oy0S4,2363 -pyasn1_modules/rfc8520.py,sha256=_o00lv2MYciOqo0UKjlZBQNY_MzzgQt1SV9VXCI0T9A,1496 -pyasn1_modules/rfc8619.py,sha256=qSYiBefLSFukLg6VIgR6dnhX-uBwJMItxqHjNXnBgM0,1136 -pyasn1_modules/rfc8649.py,sha256=oHCQK7g4vKs1B0IO9GgiidTyPOk4pz5bYkXSRmBOAHo,982 -pyasn1_modules-0.2.8.dist-info/LICENSE.txt,sha256=IsXMaSKrXWn7oy2MXuTN0UmBUIy1OvwOvYVZOEf9laU,1334 -pyasn1_modules-0.2.8.dist-info/METADATA,sha256=PEBnqiw2gqgX8JBjpbgffFX8MaJHjfU3fOzJvrjjaY8,1852 -pyasn1_modules-0.2.8.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 -pyasn1_modules-0.2.8.dist-info/top_level.txt,sha256=e_AojfE1DNY4M8P9LAS7qh8Fx3eOmovobqkr7NEjlg4,15 
-pyasn1_modules-0.2.8.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pyasn1_modules-0.2.8.dist-info/RECORD,, diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2631.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc2631.py deleted file mode 100644 index 44e537101c43..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2631.py +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Diffie-Hellman Key Agreement -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc2631.txt -# https://www.rfc-editor.org/errata/eid5897 -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - - -class KeySpecificInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('algorithm', univ.ObjectIdentifier()), - namedtype.NamedType('counter', univ.OctetString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(4, 4))) - ) - - -class OtherInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('keyInfo', KeySpecificInfo()), - namedtype.OptionalNamedType('partyAInfo', univ.OctetString().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('suppPubInfo', univ.OctetString().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) - ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2634.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc2634.py deleted file mode 100644 index 2099a4b206ef..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2634.py +++ /dev/null @@ -1,336 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add a map for use with opentypes. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Enhanced Security Services for S/MIME -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc2634.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedval -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc5280 - -MAX = float('inf') - -ContentType = rfc5652.ContentType - -IssuerAndSerialNumber = rfc5652.IssuerAndSerialNumber - -SubjectKeyIdentifier = rfc5652.SubjectKeyIdentifier - -PolicyInformation = rfc5280.PolicyInformation - -GeneralNames = rfc5280.GeneralNames - -CertificateSerialNumber = rfc5280.CertificateSerialNumber - - -# Signing Certificate Attribute -# Warning: It is better to use SigningCertificateV2 from RFC 5035 - -id_aa_signingCertificate = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.12') - -class Hash(univ.OctetString): - pass # SHA-1 hash of entire certificate; RFC 5035 supports other hash algorithms - - -class IssuerSerial(univ.Sequence): - pass - -IssuerSerial.componentType = namedtype.NamedTypes( - namedtype.NamedType('issuer', GeneralNames()), - namedtype.NamedType('serialNumber', CertificateSerialNumber()) -) - - -class ESSCertID(univ.Sequence): - pass - -ESSCertID.componentType = namedtype.NamedTypes( - namedtype.NamedType('certHash', Hash()), - namedtype.OptionalNamedType('issuerSerial', IssuerSerial()) -) - - -class SigningCertificate(univ.Sequence): - pass - -SigningCertificate.componentType = namedtype.NamedTypes( - namedtype.NamedType('certs', univ.SequenceOf( - componentType=ESSCertID())), - namedtype.OptionalNamedType('policies', univ.SequenceOf( - componentType=PolicyInformation())) -) - - -# Mail List Expansion History Attribute - -id_aa_mlExpandHistory = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.3') - -ub_ml_expansion_history = univ.Integer(64) - - -class EntityIdentifier(univ.Choice): - pass - -EntityIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()), - namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier()) -) - - -class MLReceiptPolicy(univ.Choice): - pass - -MLReceiptPolicy.componentType = namedtype.NamedTypes( - namedtype.NamedType('none', univ.Null().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('insteadOf', univ.SequenceOf( - componentType=GeneralNames()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('inAdditionTo', univ.SequenceOf( - componentType=GeneralNames()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -class MLData(univ.Sequence): - pass - -MLData.componentType = namedtype.NamedTypes( - namedtype.NamedType('mailListIdentifier', EntityIdentifier()), - namedtype.NamedType('expansionTime', useful.GeneralizedTime()), - namedtype.OptionalNamedType('mlReceiptPolicy', MLReceiptPolicy()) -) - -class MLExpansionHistory(univ.SequenceOf): - pass - -MLExpansionHistory.componentType = MLData() -MLExpansionHistory.sizeSpec = constraint.ValueSizeConstraint(1, ub_ml_expansion_history) - - -# ESS Security Label Attribute - -id_aa_securityLabel = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.2') - 
-ub_privacy_mark_length = univ.Integer(128) - -ub_security_categories = univ.Integer(64) - -ub_integer_options = univ.Integer(256) - - -class ESSPrivacyMark(univ.Choice): - pass - -ESSPrivacyMark.componentType = namedtype.NamedTypes( - namedtype.NamedType('pString', char.PrintableString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, ub_privacy_mark_length))), - namedtype.NamedType('utf8String', char.UTF8String().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -class SecurityClassification(univ.Integer): - pass - -SecurityClassification.subtypeSpec=constraint.ValueRangeConstraint(0, ub_integer_options) - -SecurityClassification.namedValues = namedval.NamedValues( - ('unmarked', 0), - ('unclassified', 1), - ('restricted', 2), - ('confidential', 3), - ('secret', 4), - ('top-secret', 5) -) - - -class SecurityPolicyIdentifier(univ.ObjectIdentifier): - pass - - -class SecurityCategory(univ.Sequence): - pass - -SecurityCategory.componentType = namedtype.NamedTypes( - namedtype.NamedType('type', univ.ObjectIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('value', univ.Any().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class SecurityCategories(univ.SetOf): - pass - -SecurityCategories.componentType = SecurityCategory() -SecurityCategories.sizeSpec = constraint.ValueSizeConstraint(1, ub_security_categories) - - -class ESSSecurityLabel(univ.Set): - pass - -ESSSecurityLabel.componentType = namedtype.NamedTypes( - namedtype.NamedType('security-policy-identifier', SecurityPolicyIdentifier()), - namedtype.OptionalNamedType('security-classification', SecurityClassification()), - namedtype.OptionalNamedType('privacy-mark', ESSPrivacyMark()), - namedtype.OptionalNamedType('security-categories', SecurityCategories()) -) - - -# Equivalent Labels Attribute - -id_aa_equivalentLabels = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.9') - -class EquivalentLabels(univ.SequenceOf): - pass - -EquivalentLabels.componentType = ESSSecurityLabel() - - -# Content Identifier Attribute - -id_aa_contentIdentifier = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.7') - -class ContentIdentifier(univ.OctetString): - pass - - -# Content Reference Attribute - -id_aa_contentReference = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.10') - -class ContentReference(univ.Sequence): - pass - -ContentReference.componentType = namedtype.NamedTypes( - namedtype.NamedType('contentType', ContentType()), - namedtype.NamedType('signedContentIdentifier', ContentIdentifier()), - namedtype.NamedType('originatorSignatureValue', univ.OctetString()) -) - - -# Message Signature Digest Attribute - -id_aa_msgSigDigest = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.5') - -class MsgSigDigest(univ.OctetString): - pass - - -# Content Hints Attribute - -id_aa_contentHint = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.4') - -class ContentHints(univ.Sequence): - pass - -ContentHints.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('contentDescription', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), - namedtype.NamedType('contentType', ContentType()) -) - - -# Receipt Request Attribute - -class AllOrFirstTier(univ.Integer): - pass - -AllOrFirstTier.namedValues = namedval.NamedValues( - ('allReceipts', 0), - ('firstTierRecipients', 1) -) - - -class ReceiptsFrom(univ.Choice): - pass - -ReceiptsFrom.componentType = namedtype.NamedTypes( - 
namedtype.NamedType('allOrFirstTier', AllOrFirstTier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('receiptList', univ.SequenceOf( - componentType=GeneralNames()).subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -id_aa_receiptRequest = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.1') - -ub_receiptsTo = univ.Integer(16) - -class ReceiptRequest(univ.Sequence): - pass - -ReceiptRequest.componentType = namedtype.NamedTypes( - namedtype.NamedType('signedContentIdentifier', ContentIdentifier()), - namedtype.NamedType('receiptsFrom', ReceiptsFrom()), - namedtype.NamedType('receiptsTo', univ.SequenceOf(componentType=GeneralNames()).subtype(sizeSpec=constraint.ValueSizeConstraint(1, ub_receiptsTo))) -) - -# Receipt Content Type - -class ESSVersion(univ.Integer): - pass - -ESSVersion.namedValues = namedval.NamedValues( - ('v1', 1) -) - - -id_ct_receipt = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.1') - -class Receipt(univ.Sequence): - pass - -Receipt.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', ESSVersion()), - namedtype.NamedType('contentType', ContentType()), - namedtype.NamedType('signedContentIdentifier', ContentIdentifier()), - namedtype.NamedType('originatorSignatureValue', univ.OctetString()) -) - - -# Map of Attribute Type to the Attribute structure is added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_signingCertificate: SigningCertificate(), - id_aa_mlExpandHistory: MLExpansionHistory(), - id_aa_securityLabel: ESSSecurityLabel(), - id_aa_equivalentLabels: EquivalentLabels(), - id_aa_contentIdentifier: ContentIdentifier(), - id_aa_contentReference: ContentReference(), - id_aa_msgSigDigest: MsgSigDigest(), - id_aa_contentHint: ContentHints(), - id_aa_receiptRequest: ReceiptRequest(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_receipt: Receipt(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2985.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc2985.py deleted file mode 100644 index 75bccf097dcd..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2985.py +++ /dev/null @@ -1,588 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# PKCS#9: Selected Attribute Types (Version 2.0) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc2985.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc7292 -from pyasn1_modules import rfc5958 -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc5280 - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -MAX = float('inf') - - -# Imports from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -Attribute = rfc5280.Attribute - -EmailAddress = rfc5280.EmailAddress - -Extensions = rfc5280.Extensions - -Time = rfc5280.Time - -X520countryName = rfc5280.X520countryName - -X520SerialNumber = rfc5280.X520SerialNumber - - -# Imports from RFC 5652 - -ContentInfo = rfc5652.ContentInfo - -ContentType = rfc5652.ContentType - -Countersignature = rfc5652.Countersignature - -MessageDigest = rfc5652.MessageDigest - -SignerInfo = rfc5652.SignerInfo - -SigningTime = rfc5652.SigningTime - - -# Imports from RFC 5958 - -EncryptedPrivateKeyInfo = rfc5958.EncryptedPrivateKeyInfo - - -# Imports from RFC 7292 - -PFX = rfc7292.PFX - - -# TODO: -# Need a place to import PKCS15Token; it does not yet appear in an RFC - - -# SingleAttribute is the same as Attribute in RFC 5280, except that the -# attrValues SET must have one and only one member - -class AttributeType(univ.ObjectIdentifier): - pass - - -class AttributeValue(univ.Any): - pass - - -class AttributeValues(univ.SetOf): - pass - -AttributeValues.componentType = AttributeValue() - - -class SingleAttributeValues(univ.SetOf): - pass - -SingleAttributeValues.componentType = AttributeValue() - - -class SingleAttribute(univ.Sequence): - pass - -SingleAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('type', AttributeType()), - namedtype.NamedType('values', - AttributeValues().subtype(sizeSpec=constraint.ValueSizeConstraint(1, 1)), - openType=opentype.OpenType('type', rfc5280.certificateAttributesMap) - ) -) - - -# CMSAttribute is the same as Attribute in RFC 5652, and CMSSingleAttribute -# is the companion where the attrValues SET must have one and only one member - -CMSAttribute = rfc5652.Attribute - - -class CMSSingleAttribute(univ.Sequence): - pass - -CMSSingleAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('attrType', AttributeType()), - namedtype.NamedType('attrValues', - AttributeValues().subtype(sizeSpec=constraint.ValueSizeConstraint(1, 1)), - openType=opentype.OpenType('attrType', rfc5652.cmsAttributesMap) - ) -) - - -# DirectoryString is the same as RFC 5280, except the length is limited to 255 - -class DirectoryString(univ.Choice): - pass - -DirectoryString.componentType = namedtype.NamedTypes( - namedtype.NamedType('teletexString', char.TeletexString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('printableString', char.PrintableString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('universalString', char.UniversalString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 
255))), - namedtype.NamedType('utf8String', char.UTF8String().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('bmpString', char.BMPString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))) -) - - -# PKCS9String is DirectoryString with an additional choice of IA5String, -# and the SIZE is limited to 255 - -class PKCS9String(univ.Choice): - pass - -PKCS9String.componentType = namedtype.NamedTypes( - namedtype.NamedType('ia5String', char.IA5String().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('directoryString', DirectoryString()) -) - - -# Upper Bounds - -pkcs_9_ub_pkcs9String = univ.Integer(255) - -pkcs_9_ub_challengePassword = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_ub_emailAddress = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_ub_friendlyName = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_ub_match = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_ub_signingDescription = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_ub_unstructuredAddress = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_ub_unstructuredName = univ.Integer(pkcs_9_ub_pkcs9String) - - -ub_name = univ.Integer(32768) - -pkcs_9_ub_placeOfBirth = univ.Integer(ub_name) - -pkcs_9_ub_pseudonym = univ.Integer(ub_name) - - -# Object Identifier Arcs - -ietf_at = _OID(1, 3, 6, 1, 5, 5, 7, 9) - -id_at = _OID(2, 5, 4) - -pkcs_9 = _OID(1, 2, 840, 113549, 1, 9) - -pkcs_9_mo = _OID(pkcs_9, 0) - -smime = _OID(pkcs_9, 16) - -certTypes = _OID(pkcs_9, 22) - -crlTypes = _OID(pkcs_9, 23) - -pkcs_9_oc = _OID(pkcs_9, 24) - -pkcs_9_at = _OID(pkcs_9, 25) - -pkcs_9_sx = _OID(pkcs_9, 26) - -pkcs_9_mr = _OID(pkcs_9, 27) - - -# Object Identifiers for Syntaxes for use with LDAP-accessible directories - -pkcs_9_sx_pkcs9String = _OID(pkcs_9_sx, 1) - -pkcs_9_sx_signingTime = _OID(pkcs_9_sx, 2) - - -# Object Identifiers for object classes - -pkcs_9_oc_pkcsEntity = _OID(pkcs_9_oc, 1) - -pkcs_9_oc_naturalPerson = _OID(pkcs_9_oc, 2) - - -# Object Identifiers for matching rules - -pkcs_9_mr_caseIgnoreMatch = _OID(pkcs_9_mr, 1) - -pkcs_9_mr_signingTimeMatch = _OID(pkcs_9_mr, 2) - - -# PKCS #7 PDU - -pkcs_9_at_pkcs7PDU = _OID(pkcs_9_at, 5) - -pKCS7PDU = Attribute() -pKCS7PDU['type'] = pkcs_9_at_pkcs7PDU -pKCS7PDU['values'][0] = ContentInfo() - - -# PKCS #12 token - -pkcs_9_at_userPKCS12 = _OID(2, 16, 840, 1, 113730, 3, 1, 216) - -userPKCS12 = Attribute() -userPKCS12['type'] = pkcs_9_at_userPKCS12 -userPKCS12['values'][0] = PFX() - - -# PKCS #15 token - -pkcs_9_at_pkcs15Token = _OID(pkcs_9_at, 1) - -# TODO: Once PKCS15Token can be imported, this can be included -# -# pKCS15Token = Attribute() -# userPKCS12['type'] = pkcs_9_at_pkcs15Token -# userPKCS12['values'][0] = PKCS15Token() - - -# PKCS #8 encrypted private key information - -pkcs_9_at_encryptedPrivateKeyInfo = _OID(pkcs_9_at, 2) - -encryptedPrivateKeyInfo = Attribute() -encryptedPrivateKeyInfo['type'] = pkcs_9_at_encryptedPrivateKeyInfo -encryptedPrivateKeyInfo['values'][0] = EncryptedPrivateKeyInfo() - - -# Electronic-mail address - -pkcs_9_at_emailAddress = rfc5280.id_emailAddress - -emailAddress = Attribute() -emailAddress['type'] = pkcs_9_at_emailAddress -emailAddress['values'][0] = EmailAddress() - - -# Unstructured name - -pkcs_9_at_unstructuredName = _OID(pkcs_9, 2) - -unstructuredName = Attribute() -unstructuredName['type'] = pkcs_9_at_unstructuredName -unstructuredName['values'][0] = PKCS9String() - - -# Unstructured address - -pkcs_9_at_unstructuredAddress = _OID(pkcs_9, 8) - -unstructuredAddress = 
Attribute() -unstructuredAddress['type'] = pkcs_9_at_unstructuredAddress -unstructuredAddress['values'][0] = DirectoryString() - - -# Date of birth - -pkcs_9_at_dateOfBirth = _OID(ietf_at, 1) - -dateOfBirth = SingleAttribute() -dateOfBirth['type'] = pkcs_9_at_dateOfBirth -dateOfBirth['values'][0] = useful.GeneralizedTime() - - -# Place of birth - -pkcs_9_at_placeOfBirth = _OID(ietf_at, 2) - -placeOfBirth = SingleAttribute() -placeOfBirth['type'] = pkcs_9_at_placeOfBirth -placeOfBirth['values'][0] = DirectoryString() - - -# Gender - -class GenderString(char.PrintableString): - pass - -GenderString.subtypeSpec = constraint.ValueSizeConstraint(1, 1) -GenderString.subtypeSpec = constraint.SingleValueConstraint("M", "F", "m", "f") - - -pkcs_9_at_gender = _OID(ietf_at, 3) - -gender = SingleAttribute() -gender['type'] = pkcs_9_at_gender -gender['values'][0] = GenderString() - - -# Country of citizenship - -pkcs_9_at_countryOfCitizenship = _OID(ietf_at, 4) - -countryOfCitizenship = Attribute() -countryOfCitizenship['type'] = pkcs_9_at_countryOfCitizenship -countryOfCitizenship['values'][0] = X520countryName() - - -# Country of residence - -pkcs_9_at_countryOfResidence = _OID(ietf_at, 5) - -countryOfResidence = Attribute() -countryOfResidence['type'] = pkcs_9_at_countryOfResidence -countryOfResidence['values'][0] = X520countryName() - - -# Pseudonym - -id_at_pseudonym = _OID(2, 5, 4, 65) - -pseudonym = Attribute() -pseudonym['type'] = id_at_pseudonym -pseudonym['values'][0] = DirectoryString() - - -# Serial number - -id_at_serialNumber = rfc5280.id_at_serialNumber - -serialNumber = Attribute() -serialNumber['type'] = id_at_serialNumber -serialNumber['values'][0] = X520SerialNumber() - - -# Content type - -pkcs_9_at_contentType = rfc5652.id_contentType - -contentType = CMSSingleAttribute() -contentType['attrType'] = pkcs_9_at_contentType -contentType['attrValues'][0] = ContentType() - - -# Message digest - -pkcs_9_at_messageDigest = rfc5652.id_messageDigest - -messageDigest = CMSSingleAttribute() -messageDigest['attrType'] = pkcs_9_at_messageDigest -messageDigest['attrValues'][0] = MessageDigest() - - -# Signing time - -pkcs_9_at_signingTime = rfc5652.id_signingTime - -signingTime = CMSSingleAttribute() -signingTime['attrType'] = pkcs_9_at_signingTime -signingTime['attrValues'][0] = SigningTime() - - -# Random nonce - -class RandomNonce(univ.OctetString): - pass - -RandomNonce.subtypeSpec = constraint.ValueSizeConstraint(4, MAX) - - -pkcs_9_at_randomNonce = _OID(pkcs_9_at, 3) - -randomNonce = CMSSingleAttribute() -randomNonce['attrType'] = pkcs_9_at_randomNonce -randomNonce['attrValues'][0] = RandomNonce() - - -# Sequence number - -class SequenceNumber(univ.Integer): - pass - -SequenceNumber.subtypeSpec = constraint.ValueRangeConstraint(1, MAX) - - -pkcs_9_at_sequenceNumber = _OID(pkcs_9_at, 4) - -sequenceNumber = CMSSingleAttribute() -sequenceNumber['attrType'] = pkcs_9_at_sequenceNumber -sequenceNumber['attrValues'][0] = SequenceNumber() - - -# Countersignature - -pkcs_9_at_counterSignature = rfc5652.id_countersignature - -counterSignature = CMSAttribute() -counterSignature['attrType'] = pkcs_9_at_counterSignature -counterSignature['attrValues'][0] = Countersignature() - - -# Challenge password - -pkcs_9_at_challengePassword = _OID(pkcs_9, 7) - -challengePassword = SingleAttribute() -challengePassword['type'] = pkcs_9_at_challengePassword -challengePassword['values'][0] = DirectoryString() - - -# Extension request - -class ExtensionRequest(Extensions): - pass - - -pkcs_9_at_extensionRequest = 
_OID(pkcs_9, 14) - -extensionRequest = SingleAttribute() -extensionRequest['type'] = pkcs_9_at_extensionRequest -extensionRequest['values'][0] = ExtensionRequest() - - -# Extended-certificate attributes (deprecated) - -class AttributeSet(univ.SetOf): - pass - -AttributeSet.componentType = Attribute() - - -pkcs_9_at_extendedCertificateAttributes = _OID(pkcs_9, 9) - -extendedCertificateAttributes = SingleAttribute() -extendedCertificateAttributes['type'] = pkcs_9_at_extendedCertificateAttributes -extendedCertificateAttributes['values'][0] = AttributeSet() - - -# Friendly name - -class FriendlyName(char.BMPString): - pass - -FriendlyName.subtypeSpec = constraint.ValueSizeConstraint(1, pkcs_9_ub_friendlyName) - - -pkcs_9_at_friendlyName = _OID(pkcs_9, 20) - -friendlyName = SingleAttribute() -friendlyName['type'] = pkcs_9_at_friendlyName -friendlyName['values'][0] = FriendlyName() - - -# Local key identifier - -pkcs_9_at_localKeyId = _OID(pkcs_9, 21) - -localKeyId = SingleAttribute() -localKeyId['type'] = pkcs_9_at_localKeyId -localKeyId['values'][0] = univ.OctetString() - - -# Signing description - -pkcs_9_at_signingDescription = _OID(pkcs_9, 13) - -signingDescription = CMSSingleAttribute() -signingDescription['attrType'] = pkcs_9_at_signingDescription -signingDescription['attrValues'][0] = DirectoryString() - - -# S/MIME capabilities - -class SMIMECapability(AlgorithmIdentifier): - pass - - -class SMIMECapabilities(univ.SequenceOf): - pass - -SMIMECapabilities.componentType = SMIMECapability() - - -pkcs_9_at_smimeCapabilities = _OID(pkcs_9, 15) - -smimeCapabilities = CMSSingleAttribute() -smimeCapabilities['attrType'] = pkcs_9_at_smimeCapabilities -smimeCapabilities['attrValues'][0] = SMIMECapabilities() - - -# Certificate Attribute Map - -_certificateAttributesMapUpdate = { - # Attribute types for use with the "pkcsEntity" object class - pkcs_9_at_pkcs7PDU: ContentInfo(), - pkcs_9_at_userPKCS12: PFX(), - # TODO: Once PKCS15Token can be imported, this can be included - # pkcs_9_at_pkcs15Token: PKCS15Token(), - pkcs_9_at_encryptedPrivateKeyInfo: EncryptedPrivateKeyInfo(), - # Attribute types for use with the "naturalPerson" object class - pkcs_9_at_emailAddress: EmailAddress(), - pkcs_9_at_unstructuredName: PKCS9String(), - pkcs_9_at_unstructuredAddress: DirectoryString(), - pkcs_9_at_dateOfBirth: useful.GeneralizedTime(), - pkcs_9_at_placeOfBirth: DirectoryString(), - pkcs_9_at_gender: GenderString(), - pkcs_9_at_countryOfCitizenship: X520countryName(), - pkcs_9_at_countryOfResidence: X520countryName(), - id_at_pseudonym: DirectoryString(), - id_at_serialNumber: X520SerialNumber(), - # Attribute types for use with PKCS #10 certificate requests - pkcs_9_at_challengePassword: DirectoryString(), - pkcs_9_at_extensionRequest: ExtensionRequest(), - pkcs_9_at_extendedCertificateAttributes: AttributeSet(), -} - -rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate) - - -# CMS Attribute Map - -# Note: pkcs_9_at_smimeCapabilities is not included in the map because -# the definition in RFC 5751 is preferred, which produces the same -# encoding, but it allows different parameters for SMIMECapability -# and AlgorithmIdentifier. - -_cmsAttributesMapUpdate = { - # Attribute types for use in PKCS #7 data (a.k.a. 
CMS) - pkcs_9_at_contentType: ContentType(), - pkcs_9_at_messageDigest: MessageDigest(), - pkcs_9_at_signingTime: SigningTime(), - pkcs_9_at_randomNonce: RandomNonce(), - pkcs_9_at_sequenceNumber: SequenceNumber(), - pkcs_9_at_counterSignature: Countersignature(), - # Attributes for use in PKCS #12 "PFX" PDUs or PKCS #15 tokens - pkcs_9_at_friendlyName: FriendlyName(), - pkcs_9_at_localKeyId: univ.OctetString(), - pkcs_9_at_signingDescription: DirectoryString(), - # pkcs_9_at_smimeCapabilities: SMIMECapabilities(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2986.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc2986.py deleted file mode 100644 index 34acbd58d0c9..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc2986.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding: utf-8 -# -# This file is part of pyasn1-modules software. -# -# Created by Joel Johnson with asn1ate tool. -# Modified by Russ Housley to add support for opentypes by importing -# definitions from rfc5280 so that the same maps are used. -# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html -# -# PKCS #10: Certification Request Syntax Specification -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc2986.txt -# -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -AttributeType = rfc5280.AttributeType - -AttributeValue = rfc5280.AttributeValue - -AttributeTypeAndValue = rfc5280.AttributeTypeAndValue - -Attribute = rfc5280.Attribute - -RelativeDistinguishedName = rfc5280.RelativeDistinguishedName - -RDNSequence = rfc5280.RDNSequence - -Name = rfc5280.Name - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -SubjectPublicKeyInfo = rfc5280.SubjectPublicKeyInfo - - -class Attributes(univ.SetOf): - pass - - -Attributes.componentType = Attribute() - - -class CertificationRequestInfo(univ.Sequence): - pass - - -CertificationRequestInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer()), - namedtype.NamedType('subject', Name()), - namedtype.NamedType('subjectPKInfo', SubjectPublicKeyInfo()), - namedtype.NamedType('attributes', - Attributes().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)) - ) -) - - -class CertificationRequest(univ.Sequence): - pass - - -CertificationRequest.componentType = namedtype.NamedTypes( - namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()), - namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()) -) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3114.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3114.py deleted file mode 100644 index badcb1f21403..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3114.py +++ /dev/null @@ -1,77 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# TEST Company Classification Policies -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3114.txt -# - -from pyasn1.type import char -from pyasn1.type import namedval -from pyasn1.type import univ - -from pyasn1_modules import rfc5755 - - -id_smime = univ.ObjectIdentifier((1, 2, 840, 113549, 1, 9, 16, )) - -id_tsp = id_smime + (7, ) - -id_tsp_TEST_Amoco = id_tsp + (1, ) - -class Amoco_SecurityClassification(univ.Integer): - namedValues = namedval.NamedValues( - ('amoco-general', 6), - ('amoco-confidential', 7), - ('amoco-highly-confidential', 8) - ) - - -id_tsp_TEST_Caterpillar = id_tsp + (2, ) - -class Caterpillar_SecurityClassification(univ.Integer): - namedValues = namedval.NamedValues( - ('caterpillar-public', 6), - ('caterpillar-green', 7), - ('caterpillar-yellow', 8), - ('caterpillar-red', 9) - ) - - -id_tsp_TEST_Whirlpool = id_tsp + (3, ) - -class Whirlpool_SecurityClassification(univ.Integer): - namedValues = namedval.NamedValues( - ('whirlpool-public', 6), - ('whirlpool-internal', 7), - ('whirlpool-confidential', 8) - ) - - -id_tsp_TEST_Whirlpool_Categories = id_tsp + (4, ) - -class SecurityCategoryValues(univ.SequenceOf): - componentType = char.UTF8String() - -# Example SecurityCategoryValues: "LAW DEPARTMENT USE ONLY" -# Example SecurityCategoryValues: "HUMAN RESOURCES USE ONLY" - - -# Also, the privacy mark in the security label can contain a string, -# such as: "ATTORNEY-CLIENT PRIVILEGED INFORMATION" - - -# Map of security category type OIDs to security category added -# to the ones that are in rfc5755.py - -_securityCategoryMapUpdate = { - id_tsp_TEST_Whirlpool_Categories: SecurityCategoryValues(), -} - -rfc5755.securityCategoryMap.update(_securityCategoryMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3161.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3161.py deleted file mode 100644 index 0e1dcedb393b..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3161.py +++ /dev/null @@ -1,142 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Time-Stamp Protocol (TSP) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3161.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc4210 -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - - -Extensions = rfc5280.Extensions - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -GeneralName = rfc5280.GeneralName - -ContentInfo = rfc5652.ContentInfo - -PKIFreeText = rfc4210.PKIFreeText - - -id_ct_TSTInfo = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.4') - - -class Accuracy(univ.Sequence): - pass - -Accuracy.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('seconds', univ.Integer()), - namedtype.OptionalNamedType('millis', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 999)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('micros', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 999)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class MessageImprint(univ.Sequence): - pass - -MessageImprint.componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('hashedMessage', univ.OctetString()) -) - - -class PKIFailureInfo(univ.BitString): - pass - -PKIFailureInfo.namedValues = namedval.NamedValues( - ('badAlg', 0), - ('badRequest', 2), - ('badDataFormat', 5), - ('timeNotAvailable', 14), - ('unacceptedPolicy', 15), - ('unacceptedExtension', 16), - ('addInfoNotAvailable', 17), - ('systemFailure', 25) -) - - -class PKIStatus(univ.Integer): - pass - -PKIStatus.namedValues = namedval.NamedValues( - ('granted', 0), - ('grantedWithMods', 1), - ('rejection', 2), - ('waiting', 3), - ('revocationWarning', 4), - ('revocationNotification', 5) -) - - -class PKIStatusInfo(univ.Sequence): - pass - -PKIStatusInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('status', PKIStatus()), - namedtype.OptionalNamedType('statusString', PKIFreeText()), - namedtype.OptionalNamedType('failInfo', PKIFailureInfo()) -) - - -class TSAPolicyId(univ.ObjectIdentifier): - pass - - -class TSTInfo(univ.Sequence): - pass - -TSTInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 1)))), - namedtype.NamedType('policy', TSAPolicyId()), - namedtype.NamedType('messageImprint', MessageImprint()), - namedtype.NamedType('serialNumber', univ.Integer()), - namedtype.NamedType('genTime', useful.GeneralizedTime()), - namedtype.OptionalNamedType('accuracy', Accuracy()), - namedtype.DefaultedNamedType('ordering', univ.Boolean().subtype(value=0)), - namedtype.OptionalNamedType('nonce', univ.Integer()), - namedtype.OptionalNamedType('tsa', GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('extensions', Extensions().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class TimeStampReq(univ.Sequence): - pass - -TimeStampReq.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 1)))), - namedtype.NamedType('messageImprint', MessageImprint()), - 
namedtype.OptionalNamedType('reqPolicy', TSAPolicyId()), - namedtype.OptionalNamedType('nonce', univ.Integer()), - namedtype.DefaultedNamedType('certReq', univ.Boolean().subtype(value=0)), - namedtype.OptionalNamedType('extensions', Extensions().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) -) - - -class TimeStampToken(ContentInfo): - pass - - -class TimeStampResp(univ.Sequence): - pass - -TimeStampResp.componentType = namedtype.NamedTypes( - namedtype.NamedType('status', PKIStatusInfo()), - namedtype.OptionalNamedType('timeStampToken', TimeStampToken()) -) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3274.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3274.py deleted file mode 100644 index 425e006f3ddb..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3274.py +++ /dev/null @@ -1,59 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add a map for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# CMS Compressed Data Content Type -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3274.txt -# - -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - - -class CompressionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -# The CMS Compressed Data Content Type - -id_ct_compressedData = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.9') - -class CompressedData(univ.Sequence): - pass - -CompressedData.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', rfc5652.CMSVersion()), # Always set to 0 - namedtype.NamedType('compressionAlgorithm', CompressionAlgorithmIdentifier()), - namedtype.NamedType('encapContentInfo', rfc5652.EncapsulatedContentInfo()) -) - - -# Algorithm identifier for the zLib Compression Algorithm -# This includes cpa_zlibCompress as defined in RFC 6268, -# from https://www.rfc-editor.org/rfc/rfc6268.txt - -id_alg_zlibCompress = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.8') - -cpa_zlibCompress = rfc5280.AlgorithmIdentifier() -cpa_zlibCompress['algorithm'] = id_alg_zlibCompress -# cpa_zlibCompress['parameters'] are absent - - -# Map of Content Type OIDs to Content Types is added to thr -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_compressedData: CompressedData(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3560.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3560.py deleted file mode 100644 index 8365436df57b..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3560.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# RSAES-OAEP Key Transport Algorithm in CMS -# -# Notice that all of the things needed in RFC 3560 are also defined -# in RFC 4055. So, they are all pulled from the RFC 4055 module into -# this one so that people looking a RFC 3560 can easily find them. 
-# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3560.txt -# - -from pyasn1_modules import rfc4055 - -id_sha1 = rfc4055.id_sha1 - -id_sha256 = rfc4055.id_sha256 - -id_sha384 = rfc4055.id_sha384 - -id_sha512 = rfc4055.id_sha512 - -id_mgf1 = rfc4055.id_mgf1 - -rsaEncryption = rfc4055.rsaEncryption - -id_RSAES_OAEP = rfc4055.id_RSAES_OAEP - -id_pSpecified = rfc4055.id_pSpecified - -sha1Identifier = rfc4055.sha1Identifier - -sha256Identifier = rfc4055.sha256Identifier - -sha384Identifier = rfc4055.sha384Identifier - -sha512Identifier = rfc4055.sha512Identifier - -mgf1SHA1Identifier = rfc4055.mgf1SHA1Identifier - -mgf1SHA256Identifier = rfc4055.mgf1SHA256Identifier - -mgf1SHA384Identifier = rfc4055.mgf1SHA384Identifier - -mgf1SHA512Identifier = rfc4055.mgf1SHA512Identifier - -pSpecifiedEmptyIdentifier = rfc4055.pSpecifiedEmptyIdentifier - - -class RSAES_OAEP_params(rfc4055.RSAES_OAEP_params): - pass - - -rSAES_OAEP_Default_Params = RSAES_OAEP_params() - -rSAES_OAEP_Default_Identifier = rfc4055.rSAES_OAEP_Default_Identifier - -rSAES_OAEP_SHA256_Params = rfc4055.rSAES_OAEP_SHA256_Params - -rSAES_OAEP_SHA256_Identifier = rfc4055.rSAES_OAEP_SHA256_Identifier - -rSAES_OAEP_SHA384_Params = rfc4055.rSAES_OAEP_SHA384_Params - -rSAES_OAEP_SHA384_Identifier = rfc4055.rSAES_OAEP_SHA384_Identifier - -rSAES_OAEP_SHA512_Params = rfc4055.rSAES_OAEP_SHA512_Params - -rSAES_OAEP_SHA512_Identifier = rfc4055.rSAES_OAEP_SHA512_Identifier diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3565.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3565.py deleted file mode 100644 index ec75e2348925..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3565.py +++ /dev/null @@ -1,57 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. -# Modified by Russ Housley to add maps for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Use of the Advanced Encryption Standard (AES) Encryption -# Algorithm in the Cryptographic Message Syntax (CMS) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3565.txt - - -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -class AlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -class AES_IV(univ.OctetString): - pass - -AES_IV.subtypeSpec = constraint.ValueSizeConstraint(16, 16) - - -id_aes128_CBC = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.2') - -id_aes192_CBC = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.22') - -id_aes256_CBC = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.42') - - -id_aes128_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.5') - -id_aes192_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.25') - -id_aes256_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.45') - - -# Update the Algorithm Identifier map - -_algorithmIdentifierMapUpdate = { - id_aes128_CBC: AES_IV(), - id_aes192_CBC: AES_IV(), - id_aes256_CBC: AES_IV(), - id_aes128_wrap: univ.Null(), - id_aes192_wrap: univ.Null(), - id_aes256_wrap: univ.Null(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3709.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3709.py deleted file mode 100644 index aa1d5b6abff1..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3709.py +++ /dev/null @@ -1,207 +0,0 @@ -# -# This file is part of pyasn1-modules software. 
-# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add maps for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Logotypes in X.509 Certificates -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3709.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc6170 - -MAX = float('inf') - - -class HashAlgAndValue(univ.Sequence): - pass - -HashAlgAndValue.componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlg', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('hashValue', univ.OctetString()) -) - - -class LogotypeDetails(univ.Sequence): - pass - -LogotypeDetails.componentType = namedtype.NamedTypes( - namedtype.NamedType('mediaType', char.IA5String()), - namedtype.NamedType('logotypeHash', univ.SequenceOf( - componentType=HashAlgAndValue()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))), - namedtype.NamedType('logotypeURI', univ.SequenceOf( - componentType=char.IA5String()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -class LogotypeAudioInfo(univ.Sequence): - pass - -LogotypeAudioInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('fileSize', univ.Integer()), - namedtype.NamedType('playTime', univ.Integer()), - namedtype.NamedType('channels', univ.Integer()), - namedtype.OptionalNamedType('sampleRate', univ.Integer().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.OptionalNamedType('language', char.IA5String().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) -) - - -class LogotypeAudio(univ.Sequence): - pass - -LogotypeAudio.componentType = namedtype.NamedTypes( - namedtype.NamedType('audioDetails', LogotypeDetails()), - namedtype.OptionalNamedType('audioInfo', LogotypeAudioInfo()) -) - - -class LogotypeImageType(univ.Integer): - pass - -LogotypeImageType.namedValues = namedval.NamedValues( - ('grayScale', 0), - ('color', 1) -) - - -class LogotypeImageResolution(univ.Choice): - pass - -LogotypeImageResolution.componentType = namedtype.NamedTypes( - namedtype.NamedType('numBits', - univ.Integer().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('tableSize', - univ.Integer().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -class LogotypeImageInfo(univ.Sequence): - pass - -LogotypeImageInfo.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('type', LogotypeImageType().subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='color')), - namedtype.NamedType('fileSize', univ.Integer()), - namedtype.NamedType('xSize', univ.Integer()), - namedtype.NamedType('ySize', univ.Integer()), - namedtype.OptionalNamedType('resolution', LogotypeImageResolution()), - namedtype.OptionalNamedType('language', char.IA5String().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) -) - - -class LogotypeImage(univ.Sequence): - pass - -LogotypeImage.componentType = namedtype.NamedTypes( - namedtype.NamedType('imageDetails', LogotypeDetails()), - namedtype.OptionalNamedType('imageInfo', LogotypeImageInfo()) -) - - -class LogotypeData(univ.Sequence): - pass - 
-LogotypeData.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('image', univ.SequenceOf( - componentType=LogotypeImage())), - namedtype.OptionalNamedType('audio', univ.SequenceOf( - componentType=LogotypeAudio()).subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1))) -) - - -class LogotypeReference(univ.Sequence): - pass - -LogotypeReference.componentType = namedtype.NamedTypes( - namedtype.NamedType('refStructHash', univ.SequenceOf( - componentType=HashAlgAndValue()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))), - namedtype.NamedType('refStructURI', univ.SequenceOf( - componentType=char.IA5String()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -class LogotypeInfo(univ.Choice): - pass - -LogotypeInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('direct', - LogotypeData().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 0))), - namedtype.NamedType('indirect', LogotypeReference().subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 1))) -) - -# Other logotype type and associated object identifiers - -id_logo_background = univ.ObjectIdentifier('1.3.6.1.5.5.7.20.2') - -id_logo_loyalty = univ.ObjectIdentifier('1.3.6.1.5.5.7.20.1') - -id_logo_certImage = rfc6170.id_logo_certImage - - -class OtherLogotypeInfo(univ.Sequence): - pass - -OtherLogotypeInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('logotypeType', univ.ObjectIdentifier()), - namedtype.NamedType('info', LogotypeInfo()) -) - - -# Logotype Certificate Extension - -id_pe_logotype = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.12') - - -class LogotypeExtn(univ.Sequence): - pass - -LogotypeExtn.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('communityLogos', univ.SequenceOf( - componentType=LogotypeInfo()).subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('issuerLogo', LogotypeInfo().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), - namedtype.OptionalNamedType('subjectLogo', LogotypeInfo().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))), - namedtype.OptionalNamedType('otherLogos', univ.SequenceOf( - componentType=OtherLogotypeInfo()).subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 3))) -) - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_logotype: LogotypeExtn(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3770.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3770.py deleted file mode 100644 index 3fefe1d90e2c..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3770.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Certificate Extensions and Attributes Supporting Authentication -# in PPP and Wireless LAN Networks -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3770.txt -# https://www.rfc-editor.org/errata/eid234 -# - -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -MAX = float('inf') - - -# Extended Key Usage Values - -id_kp_eapOverLAN = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.14') - -id_kp_eapOverPPP = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.13') - - -# Wireless LAN SSID Extension - -id_pe_wlanSSID = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.13') - - -class SSID(univ.OctetString): - pass - -SSID.subtypeSpec = constraint.ValueSizeConstraint(1, 32) - - -class SSIDList(univ.SequenceOf): - pass - -SSIDList.componentType = SSID() -SSIDList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Wireless LAN SSID Attribute Certificate Attribute -# Uses same syntax as the certificate extension: SSIDList -# Correction for https://www.rfc-editor.org/errata/eid234 - -id_aca_wlanSSID = univ.ObjectIdentifier('1.3.6.1.5.5.7.10.7') - - -# Map of Certificate Extension OIDs to Extensions -# To be added to the ones that are in rfc5280.py - -_certificateExtensionsMap = { - id_pe_wlanSSID: SSIDList(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMap) - - -# Map of AttributeType OIDs to AttributeValue added to the -# ones that are in rfc5280.py - -_certificateAttributesMapUpdate = { - id_aca_wlanSSID: SSIDList(), -} - -rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3779.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc3779.py deleted file mode 100644 index 8e6eaa3e7b29..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc3779.py +++ /dev/null @@ -1,137 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add maps for use with opentypes. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# X.509 Extensions for IP Addresses and AS Identifiers -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc3779.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -# IP Address Delegation Extension - -id_pe_ipAddrBlocks = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.7') - - -class IPAddress(univ.BitString): - pass - - -class IPAddressRange(univ.Sequence): - pass - -IPAddressRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('min', IPAddress()), - namedtype.NamedType('max', IPAddress()) -) - - -class IPAddressOrRange(univ.Choice): - pass - -IPAddressOrRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('addressPrefix', IPAddress()), - namedtype.NamedType('addressRange', IPAddressRange()) -) - - -class IPAddressChoice(univ.Choice): - pass - -IPAddressChoice.componentType = namedtype.NamedTypes( - namedtype.NamedType('inherit', univ.Null()), - namedtype.NamedType('addressesOrRanges', univ.SequenceOf( - componentType=IPAddressOrRange()) - ) -) - - -class IPAddressFamily(univ.Sequence): - pass - -IPAddressFamily.componentType = namedtype.NamedTypes( - namedtype.NamedType('addressFamily', univ.OctetString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(2, 3))), - namedtype.NamedType('ipAddressChoice', IPAddressChoice()) -) - - -class IPAddrBlocks(univ.SequenceOf): - pass - -IPAddrBlocks.componentType = IPAddressFamily() - - -# Autonomous System Identifier Delegation Extension - -id_pe_autonomousSysIds = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.8') - - -class ASId(univ.Integer): - pass - - -class ASRange(univ.Sequence): - pass - -ASRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('min', ASId()), - namedtype.NamedType('max', ASId()) -) - - -class ASIdOrRange(univ.Choice): - pass - -ASIdOrRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('id', ASId()), - namedtype.NamedType('range', ASRange()) -) - - -class ASIdentifierChoice(univ.Choice): - pass - -ASIdentifierChoice.componentType = namedtype.NamedTypes( - namedtype.NamedType('inherit', univ.Null()), - namedtype.NamedType('asIdsOrRanges', univ.SequenceOf( - componentType=ASIdOrRange()) - ) -) - - -class ASIdentifiers(univ.Sequence): - pass - -ASIdentifiers.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('asnum', ASIdentifierChoice().subtype( - explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('rdi', ASIdentifierChoice().subtype( - explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 1))) -) - - -# Map of Certificate Extension OIDs to Extensions is added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_ipAddrBlocks: IPAddrBlocks(), - id_pe_autonomousSysIds: ASIdentifiers(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4043.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc4043.py deleted file mode 100644 index cf0a801419bb..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4043.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Internet X.509 Public Key Infrastructure Permanent Identifier -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc4043.txt -# - -from pyasn1.type import char -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -id_pkix = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, )) - -id_on = id_pkix + (8, ) - -id_on_permanentIdentifier = id_on + (3, ) - - -class PermanentIdentifier(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('identifierValue', char.UTF8String()), - namedtype.OptionalNamedType('assigner', univ.ObjectIdentifier()) - ) - - -# Map of Other Name OIDs to Other Name is added to the -# ones that are in rfc5280.py - -_anotherNameMapUpdate = { - id_on_permanentIdentifier: PermanentIdentifier(), -} - -rfc5280.anotherNameMap.update(_anotherNameMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4055.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc4055.py deleted file mode 100644 index bdc128632a57..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4055.py +++ /dev/null @@ -1,258 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with a very small amount of assistance from -# asn1ate v.0.6.0. -# Modified by Russ Housley to add maps for opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Additional Algorithms and Identifiers for RSA Cryptography -# for use in Certificates and CRLs -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc4055.txt -# -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - return univ.ObjectIdentifier(output) - - -id_sha1 = _OID(1, 3, 14, 3, 2, 26) - -id_sha256 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 1) - -id_sha384 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 2) - -id_sha512 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 3) - -id_sha224 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 4) - -rsaEncryption = _OID(1, 2, 840, 113549, 1, 1, 1) - -id_mgf1 = _OID(1, 2, 840, 113549, 1, 1, 8) - -id_RSAES_OAEP = _OID(1, 2, 840, 113549, 1, 1, 7) - -id_pSpecified = _OID(1, 2, 840, 113549, 1, 1, 9) - -id_RSASSA_PSS = _OID(1, 2, 840, 113549, 1, 1, 10) - -sha256WithRSAEncryption = _OID(1, 2, 840, 113549, 1, 1, 11) - -sha384WithRSAEncryption = _OID(1, 2, 840, 113549, 1, 1, 12) - -sha512WithRSAEncryption = _OID(1, 2, 840, 113549, 1, 1, 13) - -sha224WithRSAEncryption = _OID(1, 2, 840, 113549, 1, 1, 14) - -sha1Identifier = rfc5280.AlgorithmIdentifier() -sha1Identifier['algorithm'] = id_sha1 -sha1Identifier['parameters'] = univ.Null("") - -sha224Identifier = rfc5280.AlgorithmIdentifier() -sha224Identifier['algorithm'] = id_sha224 -sha224Identifier['parameters'] = univ.Null("") - -sha256Identifier = rfc5280.AlgorithmIdentifier() -sha256Identifier['algorithm'] = id_sha256 -sha256Identifier['parameters'] = univ.Null("") - -sha384Identifier = rfc5280.AlgorithmIdentifier() -sha384Identifier['algorithm'] = id_sha384 -sha384Identifier['parameters'] = univ.Null("") - -sha512Identifier = rfc5280.AlgorithmIdentifier() -sha512Identifier['algorithm'] = id_sha512 -sha512Identifier['parameters'] = univ.Null("") - 
-mgf1SHA1Identifier = rfc5280.AlgorithmIdentifier() -mgf1SHA1Identifier['algorithm'] = id_mgf1 -mgf1SHA1Identifier['parameters'] = sha1Identifier - -mgf1SHA224Identifier = rfc5280.AlgorithmIdentifier() -mgf1SHA224Identifier['algorithm'] = id_mgf1 -mgf1SHA224Identifier['parameters'] = sha224Identifier - -mgf1SHA256Identifier = rfc5280.AlgorithmIdentifier() -mgf1SHA256Identifier['algorithm'] = id_mgf1 -mgf1SHA256Identifier['parameters'] = sha256Identifier - -mgf1SHA384Identifier = rfc5280.AlgorithmIdentifier() -mgf1SHA384Identifier['algorithm'] = id_mgf1 -mgf1SHA384Identifier['parameters'] = sha384Identifier - -mgf1SHA512Identifier = rfc5280.AlgorithmIdentifier() -mgf1SHA512Identifier['algorithm'] = id_mgf1 -mgf1SHA512Identifier['parameters'] = sha512Identifier - -pSpecifiedEmptyIdentifier = rfc5280.AlgorithmIdentifier() -pSpecifiedEmptyIdentifier['algorithm'] = id_pSpecified -pSpecifiedEmptyIdentifier['parameters'] = univ.OctetString(value='') - - -class RSAPublicKey(univ.Sequence): - pass - -RSAPublicKey.componentType = namedtype.NamedTypes( - namedtype.NamedType('modulus', univ.Integer()), - namedtype.NamedType('publicExponent', univ.Integer()) -) - - -class HashAlgorithm(rfc5280.AlgorithmIdentifier): - pass - - -class MaskGenAlgorithm(rfc5280.AlgorithmIdentifier): - pass - - -class RSAES_OAEP_params(univ.Sequence): - pass - -RSAES_OAEP_params.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('hashFunc', rfc5280.AlgorithmIdentifier().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('maskGenFunc', rfc5280.AlgorithmIdentifier().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), - namedtype.OptionalNamedType('pSourceFunc', rfc5280.AlgorithmIdentifier().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) -) - -rSAES_OAEP_Default_Params = RSAES_OAEP_params() - -rSAES_OAEP_Default_Identifier = rfc5280.AlgorithmIdentifier() -rSAES_OAEP_Default_Identifier['algorithm'] = id_RSAES_OAEP -rSAES_OAEP_Default_Identifier['parameters'] = rSAES_OAEP_Default_Params - -rSAES_OAEP_SHA224_Params = RSAES_OAEP_params() -rSAES_OAEP_SHA224_Params['hashFunc'] = sha224Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSAES_OAEP_SHA224_Params['maskGenFunc'] = mgf1SHA224Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSAES_OAEP_SHA224_Identifier = rfc5280.AlgorithmIdentifier() -rSAES_OAEP_SHA224_Identifier['algorithm'] = id_RSAES_OAEP -rSAES_OAEP_SHA224_Identifier['parameters'] = rSAES_OAEP_SHA224_Params - -rSAES_OAEP_SHA256_Params = RSAES_OAEP_params() -rSAES_OAEP_SHA256_Params['hashFunc'] = sha256Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSAES_OAEP_SHA256_Params['maskGenFunc'] = mgf1SHA256Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSAES_OAEP_SHA256_Identifier = rfc5280.AlgorithmIdentifier() -rSAES_OAEP_SHA256_Identifier['algorithm'] = id_RSAES_OAEP -rSAES_OAEP_SHA256_Identifier['parameters'] = rSAES_OAEP_SHA256_Params - -rSAES_OAEP_SHA384_Params = RSAES_OAEP_params() -rSAES_OAEP_SHA384_Params['hashFunc'] = sha384Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSAES_OAEP_SHA384_Params['maskGenFunc'] = mgf1SHA384Identifier.subtype( 
- explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSAES_OAEP_SHA384_Identifier = rfc5280.AlgorithmIdentifier() -rSAES_OAEP_SHA384_Identifier['algorithm'] = id_RSAES_OAEP -rSAES_OAEP_SHA384_Identifier['parameters'] = rSAES_OAEP_SHA384_Params - -rSAES_OAEP_SHA512_Params = RSAES_OAEP_params() -rSAES_OAEP_SHA512_Params['hashFunc'] = sha512Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSAES_OAEP_SHA512_Params['maskGenFunc'] = mgf1SHA512Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSAES_OAEP_SHA512_Identifier = rfc5280.AlgorithmIdentifier() -rSAES_OAEP_SHA512_Identifier['algorithm'] = id_RSAES_OAEP -rSAES_OAEP_SHA512_Identifier['parameters'] = rSAES_OAEP_SHA512_Params - - -class RSASSA_PSS_params(univ.Sequence): - pass - -RSASSA_PSS_params.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('hashAlgorithm', rfc5280.AlgorithmIdentifier().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('maskGenAlgorithm', rfc5280.AlgorithmIdentifier().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), - namedtype.DefaultedNamedType('saltLength', univ.Integer(value=20).subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.DefaultedNamedType('trailerField', univ.Integer(value=1).subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) -) - -rSASSA_PSS_Default_Params = RSASSA_PSS_params() - -rSASSA_PSS_Default_Identifier = rfc5280.AlgorithmIdentifier() -rSASSA_PSS_Default_Identifier['algorithm'] = id_RSASSA_PSS -rSASSA_PSS_Default_Identifier['parameters'] = rSASSA_PSS_Default_Params - -rSASSA_PSS_SHA224_Params = RSASSA_PSS_params() -rSASSA_PSS_SHA224_Params['hashAlgorithm'] = sha224Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSASSA_PSS_SHA224_Params['maskGenAlgorithm'] = mgf1SHA224Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSASSA_PSS_SHA224_Identifier = rfc5280.AlgorithmIdentifier() -rSASSA_PSS_SHA224_Identifier['algorithm'] = id_RSASSA_PSS -rSASSA_PSS_SHA224_Identifier['parameters'] = rSASSA_PSS_SHA224_Params - -rSASSA_PSS_SHA256_Params = RSASSA_PSS_params() -rSASSA_PSS_SHA256_Params['hashAlgorithm'] = sha256Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSASSA_PSS_SHA256_Params['maskGenAlgorithm'] = mgf1SHA256Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSASSA_PSS_SHA256_Identifier = rfc5280.AlgorithmIdentifier() -rSASSA_PSS_SHA256_Identifier['algorithm'] = id_RSASSA_PSS -rSASSA_PSS_SHA256_Identifier['parameters'] = rSASSA_PSS_SHA256_Params - -rSASSA_PSS_SHA384_Params = RSASSA_PSS_params() -rSASSA_PSS_SHA384_Params['hashAlgorithm'] = sha384Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSASSA_PSS_SHA384_Params['maskGenAlgorithm'] = mgf1SHA384Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSASSA_PSS_SHA384_Identifier = rfc5280.AlgorithmIdentifier() -rSASSA_PSS_SHA384_Identifier['algorithm'] = id_RSASSA_PSS 
-rSASSA_PSS_SHA384_Identifier['parameters'] = rSASSA_PSS_SHA384_Params - -rSASSA_PSS_SHA512_Params = RSASSA_PSS_params() -rSASSA_PSS_SHA512_Params['hashAlgorithm'] = sha512Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0), cloneValueFlag=True) -rSASSA_PSS_SHA512_Params['maskGenAlgorithm'] = mgf1SHA512Identifier.subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), cloneValueFlag=True) - -rSASSA_PSS_SHA512_Identifier = rfc5280.AlgorithmIdentifier() -rSASSA_PSS_SHA512_Identifier['algorithm'] = id_RSASSA_PSS -rSASSA_PSS_SHA512_Identifier['parameters'] = rSASSA_PSS_SHA512_Params - - -# Update the Algorithm Identifier map - -_algorithmIdentifierMapUpdate = { - id_sha1: univ.Null(), - id_sha224: univ.Null(), - id_sha256: univ.Null(), - id_sha384: univ.Null(), - id_sha512: univ.Null(), - id_mgf1: rfc5280.AlgorithmIdentifier(), - id_pSpecified: univ.OctetString(), - id_RSAES_OAEP: RSAES_OAEP_params(), - id_RSASSA_PSS: RSASSA_PSS_params(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4073.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc4073.py deleted file mode 100644 index 3f425b28eddb..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4073.py +++ /dev/null @@ -1,59 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with some assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add a map for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Protecting Multiple Contents with the CMS -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc4073.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -# Content Collection Content Type and Object Identifier - -id_ct_contentCollection = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.19') - -class ContentCollection(univ.SequenceOf): - pass - -ContentCollection.componentType = rfc5652.ContentInfo() -ContentCollection.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - - -# Content With Attributes Content Type and Object Identifier - -id_ct_contentWithAttrs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.20') - -class ContentWithAttributes(univ.Sequence): - pass - -ContentWithAttributes.componentType = namedtype.NamedTypes( - namedtype.NamedType('content', rfc5652.ContentInfo()), - namedtype.NamedType('attrs', univ.SequenceOf( - componentType=rfc5652.Attribute()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_contentCollection: ContentCollection(), - id_ct_contentWithAttrs: ContentWithAttributes(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4108.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc4108.py deleted file mode 100644 index ecace9e3ee95..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4108.py +++ /dev/null @@ -1,350 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add items from the verified errata. 
-# Modified by Russ Housley to add maps for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# CMS Firmware Wrapper -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc4108.txt -# https://www.rfc-editor.org/errata_search.php?rfc=4108 -# - - -from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -class HardwareSerialEntry(univ.Choice): - pass - -HardwareSerialEntry.componentType = namedtype.NamedTypes( - namedtype.NamedType('all', univ.Null()), - namedtype.NamedType('single', univ.OctetString()), - namedtype.NamedType('block', univ.Sequence(componentType=namedtype.NamedTypes( - namedtype.NamedType('low', univ.OctetString()), - namedtype.NamedType('high', univ.OctetString()) - )) - ) -) - - -class HardwareModules(univ.Sequence): - pass - -HardwareModules.componentType = namedtype.NamedTypes( - namedtype.NamedType('hwType', univ.ObjectIdentifier()), - namedtype.NamedType('hwSerialEntries', univ.SequenceOf(componentType=HardwareSerialEntry())) -) - - -class CommunityIdentifier(univ.Choice): - pass - -CommunityIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('communityOID', univ.ObjectIdentifier()), - namedtype.NamedType('hwModuleList', HardwareModules()) -) - - - -class PreferredPackageIdentifier(univ.Sequence): - pass - -PreferredPackageIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('fwPkgID', univ.ObjectIdentifier()), - namedtype.NamedType('verNum', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))) -) - - -class PreferredOrLegacyPackageIdentifier(univ.Choice): - pass - -PreferredOrLegacyPackageIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('preferred', PreferredPackageIdentifier()), - namedtype.NamedType('legacy', univ.OctetString()) -) - - -class CurrentFWConfig(univ.Sequence): - pass - -CurrentFWConfig.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('fwPkgType', univ.Integer()), - namedtype.NamedType('fwPkgName', PreferredOrLegacyPackageIdentifier()) -) - - -class PreferredOrLegacyStalePackageIdentifier(univ.Choice): - pass - -PreferredOrLegacyStalePackageIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('preferredStaleVerNum', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), - namedtype.NamedType('legacyStaleVersion', univ.OctetString()) -) - - -class FirmwarePackageLoadErrorCode(univ.Enumerated): - pass - -FirmwarePackageLoadErrorCode.namedValues = namedval.NamedValues( - ('decodeFailure', 1), - ('badContentInfo', 2), - ('badSignedData', 3), - ('badEncapContent', 4), - ('badCertificate', 5), - ('badSignerInfo', 6), - ('badSignedAttrs', 7), - ('badUnsignedAttrs', 8), - ('missingContent', 9), - ('noTrustAnchor', 10), - ('notAuthorized', 11), - ('badDigestAlgorithm', 12), - ('badSignatureAlgorithm', 13), - ('unsupportedKeySize', 14), - ('signatureFailure', 15), - ('contentTypeMismatch', 16), - ('badEncryptedData', 17), - ('unprotectedAttrsPresent', 18), - ('badEncryptContent', 19), - ('badEncryptAlgorithm', 20), - ('missingCiphertext', 21), - ('noDecryptKey', 22), - ('decryptFailure', 23), - ('badCompressAlgorithm', 24), - ('missingCompressedContent', 25), - ('decompressFailure', 26), - ('wrongHardware', 27), - ('stalePackage', 28), - ('notInCommunity', 29), - ('unsupportedPackageType', 30), - ('missingDependency', 31), 
- ('wrongDependencyVersion', 32), - ('insufficientMemory', 33), - ('badFirmware', 34), - ('unsupportedParameters', 35), - ('breaksDependency', 36), - ('otherError', 99) -) - - -class VendorLoadErrorCode(univ.Integer): - pass - - -# Wrapped Firmware Key Unsigned Attribute and Object Identifier - -id_aa_wrappedFirmwareKey = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.39') - -class WrappedFirmwareKey(rfc5652.EnvelopedData): - pass - - -# Firmware Package Information Signed Attribute and Object Identifier - -id_aa_firmwarePackageInfo = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.42') - -class FirmwarePackageInfo(univ.Sequence): - pass - -FirmwarePackageInfo.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('fwPkgType', univ.Integer()), - namedtype.OptionalNamedType('dependencies', univ.SequenceOf(componentType=PreferredOrLegacyPackageIdentifier())) -) - -FirmwarePackageInfo.sizeSpec = univ.Sequence.sizeSpec + constraint.ValueSizeConstraint(1, 2) - - -# Community Identifiers Signed Attribute and Object Identifier - -id_aa_communityIdentifiers = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.40') - -class CommunityIdentifiers(univ.SequenceOf): - pass - -CommunityIdentifiers.componentType = CommunityIdentifier() - - -# Implemented Compression Algorithms Signed Attribute and Object Identifier - -id_aa_implCompressAlgs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.43') - -class ImplementedCompressAlgorithms(univ.SequenceOf): - pass - -ImplementedCompressAlgorithms.componentType = univ.ObjectIdentifier() - - -# Implemented Cryptographic Algorithms Signed Attribute and Object Identifier - -id_aa_implCryptoAlgs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.38') - -class ImplementedCryptoAlgorithms(univ.SequenceOf): - pass - -ImplementedCryptoAlgorithms.componentType = univ.ObjectIdentifier() - - -# Decrypt Key Identifier Signed Attribute and Object Identifier - -id_aa_decryptKeyID = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.37') - -class DecryptKeyIdentifier(univ.OctetString): - pass - - -# Target Hardware Identifier Signed Attribute and Object Identifier - -id_aa_targetHardwareIDs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.36') - -class TargetHardwareIdentifiers(univ.SequenceOf): - pass - -TargetHardwareIdentifiers.componentType = univ.ObjectIdentifier() - - -# Firmware Package Identifier Signed Attribute and Object Identifier - -id_aa_firmwarePackageID = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.35') - -class FirmwarePackageIdentifier(univ.Sequence): - pass - -FirmwarePackageIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('name', PreferredOrLegacyPackageIdentifier()), - namedtype.OptionalNamedType('stale', PreferredOrLegacyStalePackageIdentifier()) -) - - -# Firmware Package Message Digest Signed Attribute and Object Identifier - -id_aa_fwPkgMessageDigest = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.41') - -class FirmwarePackageMessageDigest(univ.Sequence): - pass - -FirmwarePackageMessageDigest.componentType = namedtype.NamedTypes( - namedtype.NamedType('algorithm', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('msgDigest', univ.OctetString()) -) - - -# Firmware Package Load Error Report Content Type and Object Identifier - -class FWErrorVersion(univ.Integer): - pass - -FWErrorVersion.namedValues = namedval.NamedValues( - ('v1', 1) -) - - -id_ct_firmwareLoadError = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.18') - -class FirmwarePackageLoadError(univ.Sequence): - pass - -FirmwarePackageLoadError.componentType = 
namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', FWErrorVersion().subtype(value='v1')), - namedtype.NamedType('hwType', univ.ObjectIdentifier()), - namedtype.NamedType('hwSerialNum', univ.OctetString()), - namedtype.NamedType('errorCode', FirmwarePackageLoadErrorCode()), - namedtype.OptionalNamedType('vendorErrorCode', VendorLoadErrorCode()), - namedtype.OptionalNamedType('fwPkgName', PreferredOrLegacyPackageIdentifier()), - namedtype.OptionalNamedType('config', univ.SequenceOf(componentType=CurrentFWConfig()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -# Firmware Package Load Receipt Content Type and Object Identifier - -class FWReceiptVersion(univ.Integer): - pass - -FWReceiptVersion.namedValues = namedval.NamedValues( - ('v1', 1) -) - - -id_ct_firmwareLoadReceipt = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.17') - -class FirmwarePackageLoadReceipt(univ.Sequence): - pass - -FirmwarePackageLoadReceipt.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', FWReceiptVersion().subtype(value='v1')), - namedtype.NamedType('hwType', univ.ObjectIdentifier()), - namedtype.NamedType('hwSerialNum', univ.OctetString()), - namedtype.NamedType('fwPkgName', PreferredOrLegacyPackageIdentifier()), - namedtype.OptionalNamedType('trustAnchorKeyID', univ.OctetString()), - namedtype.OptionalNamedType('decryptKeyID', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -# Firmware Package Content Type and Object Identifier - -id_ct_firmwarePackage = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.16') - -class FirmwarePkgData(univ.OctetString): - pass - - -# Other Name syntax for Hardware Module Name - -id_on_hardwareModuleName = univ.ObjectIdentifier('1.3.6.1.5.5.7.8.4') - -class HardwareModuleName(univ.Sequence): - pass - -HardwareModuleName.componentType = namedtype.NamedTypes( - namedtype.NamedType('hwType', univ.ObjectIdentifier()), - namedtype.NamedType('hwSerialNum', univ.OctetString()) -) - - -# Map of Attribute Type OIDs to Attributes is added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_wrappedFirmwareKey: WrappedFirmwareKey(), - id_aa_firmwarePackageInfo: FirmwarePackageInfo(), - id_aa_communityIdentifiers: CommunityIdentifiers(), - id_aa_implCompressAlgs: ImplementedCompressAlgorithms(), - id_aa_implCryptoAlgs: ImplementedCryptoAlgorithms(), - id_aa_decryptKeyID: DecryptKeyIdentifier(), - id_aa_targetHardwareIDs: TargetHardwareIdentifiers(), - id_aa_firmwarePackageID: FirmwarePackageIdentifier(), - id_aa_fwPkgMessageDigest: FirmwarePackageMessageDigest(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_firmwareLoadError: FirmwarePackageLoadError(), - id_ct_firmwareLoadReceipt: FirmwarePackageLoadReceipt(), - id_ct_firmwarePackage: FirmwarePkgData(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) - - -# Map of Other Name OIDs to Other Name is added to the -# ones that are in rfc5280.py - -_anotherNameMapUpdate = { - id_on_hardwareModuleName: HardwareModuleName(), -} - -rfc5280.anotherNameMap.update(_anotherNameMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4334.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc4334.py deleted file mode 100644 index 44cd31b16699..000000000000 --- 
a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4334.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Certificate Extensions and Attributes Supporting Authentication -# in PPP and Wireless LAN Networks -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc4334.txt -# - -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# OID Arcs - -id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1') - -id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3') - -id_aca = univ.ObjectIdentifier('1.3.6.1.5.5.7.10') - - -# Extended Key Usage Values - -id_kp_eapOverPPP = id_kp + (13, ) - -id_kp_eapOverLAN = id_kp + (14, ) - - -# Wireless LAN SSID Extension - -id_pe_wlanSSID = id_pe + (13, ) - -class SSID(univ.OctetString): - constraint.ValueSizeConstraint(1, 32) - - -class SSIDList(univ.SequenceOf): - componentType = SSID() - subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Wireless LAN SSID Attribute Certificate Attribute - -id_aca_wlanSSID = id_aca + (7, ) - - -# Map of Certificate Extension OIDs to Extensions -# To be added to the ones that are in rfc5280.py - -_certificateExtensionsMap = { - id_pe_wlanSSID: SSIDList(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMap) - - -# Map of AttributeType OIDs to AttributeValue added to the -# ones that are in rfc5280.py - -_certificateAttributesMapUpdate = { - id_aca_wlanSSID: SSIDList(), -} - -rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4985.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc4985.py deleted file mode 100644 index 318e412380df..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc4985.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Expression of Service Names in X.509 Certificates -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc4985.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# As specified in Appendix A.2 of RFC 4985 - -id_pkix = rfc5280.id_pkix - -id_on = id_pkix + (8, ) - -id_on_dnsSRV = id_on + (7, ) - - -class SRVName(char.IA5String): - subtypeSpec = constraint.ValueSizeConstraint(1, MAX) - - -srvName = rfc5280.AnotherName() -srvName['type-id'] = id_on_dnsSRV -srvName['value'] = SRVName() - - -# Map of Other Name OIDs to Other Name is added to the -# ones that are in rfc5280.py - -_anotherNameMapUpdate = { - id_on_dnsSRV: SRVName(), -} - -rfc5280.anotherNameMap.update(_anotherNameMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5035.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5035.py deleted file mode 100644 index 1cec98249cb7..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5035.py +++ /dev/null @@ -1,199 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add a map for use with opentypes. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Update to Enhanced Security Services for S/MIME -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5035.txt -# - -from pyasn1.codec.der.encoder import encode as der_encode - -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc2634 -from pyasn1_modules import rfc4055 -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc5280 - -ContentType = rfc5652.ContentType - -IssuerAndSerialNumber = rfc5652.IssuerAndSerialNumber - -SubjectKeyIdentifier = rfc5652.SubjectKeyIdentifier - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -PolicyInformation = rfc5280.PolicyInformation - -GeneralNames = rfc5280.GeneralNames - -CertificateSerialNumber = rfc5280.CertificateSerialNumber - - -# Signing Certificate Attribute V1 and V2 - -id_aa_signingCertificate = rfc2634.id_aa_signingCertificate - -id_aa_signingCertificateV2 = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.47') - -Hash = rfc2634.Hash - -IssuerSerial = rfc2634.IssuerSerial - -ESSCertID = rfc2634.ESSCertID - -SigningCertificate = rfc2634.SigningCertificate - - -sha256AlgId = AlgorithmIdentifier() -sha256AlgId['algorithm'] = rfc4055.id_sha256 -# A non-schema object for sha256AlgId['parameters'] as absent -sha256AlgId['parameters'] = der_encode(univ.OctetString('')) - - -class ESSCertIDv2(univ.Sequence): - pass - -ESSCertIDv2.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('hashAlgorithm', sha256AlgId), - namedtype.NamedType('certHash', Hash()), - namedtype.OptionalNamedType('issuerSerial', IssuerSerial()) -) - - -class SigningCertificateV2(univ.Sequence): - pass - -SigningCertificateV2.componentType = namedtype.NamedTypes( - namedtype.NamedType('certs', univ.SequenceOf( - componentType=ESSCertIDv2())), - namedtype.OptionalNamedType('policies', univ.SequenceOf( - componentType=PolicyInformation())) -) - - -# Mail List Expansion History Attribute - -id_aa_mlExpandHistory = rfc2634.id_aa_mlExpandHistory - -ub_ml_expansion_history = rfc2634.ub_ml_expansion_history - -EntityIdentifier = rfc2634.EntityIdentifier - -MLReceiptPolicy = rfc2634.MLReceiptPolicy - -MLData = rfc2634.MLData - -MLExpansionHistory = rfc2634.MLExpansionHistory - - -# ESS Security Label Attribute - -id_aa_securityLabel = rfc2634.id_aa_securityLabel - -ub_privacy_mark_length = rfc2634.ub_privacy_mark_length - -ub_security_categories = rfc2634.ub_security_categories - -ub_integer_options = rfc2634.ub_integer_options - -ESSPrivacyMark = rfc2634.ESSPrivacyMark - -SecurityClassification = rfc2634.SecurityClassification - -SecurityPolicyIdentifier = rfc2634.SecurityPolicyIdentifier - -SecurityCategory = rfc2634.SecurityCategory - -SecurityCategories = rfc2634.SecurityCategories - -ESSSecurityLabel = rfc2634.ESSSecurityLabel - - -# Equivalent Labels Attribute - -id_aa_equivalentLabels = rfc2634.id_aa_equivalentLabels - -EquivalentLabels = rfc2634.EquivalentLabels - - -# Content Identifier Attribute - -id_aa_contentIdentifier = rfc2634.id_aa_contentIdentifier - -ContentIdentifier = rfc2634.ContentIdentifier - - -# Content Reference Attribute - -id_aa_contentReference = rfc2634.id_aa_contentReference - -ContentReference = rfc2634.ContentReference - - -# Message Signature Digest Attribute - -id_aa_msgSigDigest = rfc2634.id_aa_msgSigDigest - -MsgSigDigest = rfc2634.MsgSigDigest - - -# Content Hints Attribute - -id_aa_contentHint = rfc2634.id_aa_contentHint - -ContentHints = rfc2634.ContentHints - - 
-# Receipt Request Attribute - -AllOrFirstTier = rfc2634.AllOrFirstTier - -ReceiptsFrom = rfc2634.ReceiptsFrom - -id_aa_receiptRequest = rfc2634.id_aa_receiptRequest - -ub_receiptsTo = rfc2634.ub_receiptsTo - -ReceiptRequest = rfc2634.ReceiptRequest - - -# Receipt Content Type - -ESSVersion = rfc2634.ESSVersion - -id_ct_receipt = rfc2634.id_ct_receipt - -Receipt = rfc2634.Receipt - -ub_receiptsTo = rfc2634.ub_receiptsTo - -ReceiptRequest = rfc2634.ReceiptRequest - - -# Map of Attribute Type to the Attribute structure is added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_signingCertificateV2: SigningCertificateV2(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_receipt: Receipt(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5083.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5083.py deleted file mode 100644 index 26ef550c4795..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5083.py +++ /dev/null @@ -1,52 +0,0 @@ -# This file is being contributed to of pyasn1-modules software. -# -# Created by Russ Housley without assistance from the asn1ate tool. -# Modified by Russ Housley to add a map for use with opentypes and -# simplify the code for the object identifier assignment. -# -# Copyright (c) 2018, 2019 Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Authenticated-Enveloped-Data for the Cryptographic Message Syntax (CMS) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5083.txt - -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -# CMS Authenticated-Enveloped-Data Content Type - -id_ct_authEnvelopedData = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.23') - -class AuthEnvelopedData(univ.Sequence): - pass - -AuthEnvelopedData.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', rfc5652.CMSVersion()), - namedtype.OptionalNamedType('originatorInfo', rfc5652.OriginatorInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('recipientInfos', rfc5652.RecipientInfos()), - namedtype.NamedType('authEncryptedContentInfo', rfc5652.EncryptedContentInfo()), - namedtype.OptionalNamedType('authAttrs', rfc5652.AuthAttributes().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('mac', rfc5652.MessageAuthenticationCode()), - namedtype.OptionalNamedType('unauthAttrs', rfc5652.UnauthAttributes().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_authEnvelopedData: AuthEnvelopedData(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5084.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5084.py deleted file mode 100644 index 76868395619c..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5084.py +++ /dev/null @@ -1,97 +0,0 @@ -# This file is being contributed to pyasn1-modules software. 
-# -# Created by Russ Housley with assistance from the asn1ate tool, with manual -# changes to AES_CCM_ICVlen.subtypeSpec and added comments -# -# Copyright (c) 2018-2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# AES-CCM and AES-GCM Algorithms fo use with the Authenticated-Enveloped-Data -# protecting content type for the Cryptographic Message Syntax (CMS) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5084.txt - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -class AES_CCM_ICVlen(univ.Integer): - pass - - -class AES_GCM_ICVlen(univ.Integer): - pass - - -AES_CCM_ICVlen.subtypeSpec = constraint.SingleValueConstraint(4, 6, 8, 10, 12, 14, 16) - -AES_GCM_ICVlen.subtypeSpec = constraint.ValueRangeConstraint(12, 16) - - -class CCMParameters(univ.Sequence): - pass - - -CCMParameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('aes-nonce', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(7, 13))), - # The aes-nonce parameter contains 15-L octets, where L is the size of the length field. L=8 is RECOMMENDED. - # Within the scope of any content-authenticated-encryption key, the nonce value MUST be unique. - namedtype.DefaultedNamedType('aes-ICVlen', AES_CCM_ICVlen().subtype(value=12)) -) - - -class GCMParameters(univ.Sequence): - pass - - -GCMParameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('aes-nonce', univ.OctetString()), - # The aes-nonce may have any number of bits between 8 and 2^64, but it MUST be a multiple of 8 bits. - # Within the scope of any content-authenticated-encryption key, the nonce value MUST be unique. - # A nonce value of 12 octets can be processed more efficiently, so that length is RECOMMENDED. - namedtype.DefaultedNamedType('aes-ICVlen', AES_GCM_ICVlen().subtype(value=12)) -) - -aes = _OID(2, 16, 840, 1, 101, 3, 4, 1) - -id_aes128_CCM = _OID(aes, 7) - -id_aes128_GCM = _OID(aes, 6) - -id_aes192_CCM = _OID(aes, 27) - -id_aes192_GCM = _OID(aes, 26) - -id_aes256_CCM = _OID(aes, 47) - -id_aes256_GCM = _OID(aes, 46) - - -# Map of Algorithm Identifier OIDs to Parameters is added to the -# ones in rfc5280.py - -_algorithmIdentifierMapUpdate = { - id_aes128_CCM: CCMParameters(), - id_aes128_GCM: GCMParameters(), - id_aes192_CCM: CCMParameters(), - id_aes192_GCM: GCMParameters(), - id_aes256_CCM: CCMParameters(), - id_aes256_GCM: GCMParameters(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5480.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5480.py deleted file mode 100644 index 84c0c11b880a..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5480.py +++ /dev/null @@ -1,190 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add maps for opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Elliptic Curve Cryptography Subject Public Key Information -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5480.txt - - -# What can be imported from rfc4055.py ? 
- -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc3279 -from pyasn1_modules import rfc5280 - - -# These structures are the same as RFC 3279. - -DHPublicKey = rfc3279.DHPublicKey - -DSAPublicKey = rfc3279.DSAPublicKey - -ValidationParms = rfc3279.ValidationParms - -DomainParameters = rfc3279.DomainParameters - -ECDSA_Sig_Value = rfc3279.ECDSA_Sig_Value - -ECPoint = rfc3279.ECPoint - -KEA_Parms_Id = rfc3279.KEA_Parms_Id - -RSAPublicKey = rfc3279.RSAPublicKey - - -# RFC 5480 changed the names of these structures from RFC 3279. - -DSS_Parms = rfc3279.Dss_Parms - -DSA_Sig_Value = rfc3279.Dss_Sig_Value - - -# RFC 3279 defines a more complex alternative for ECParameters. -# RFC 5480 narrows the definition to a single CHOICE: namedCurve. - -class ECParameters(univ.Choice): - pass - -ECParameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('namedCurve', univ.ObjectIdentifier()) -) - - -# OIDs for Message Digest Algorithms - -id_md2 = univ.ObjectIdentifier('1.2.840.113549.2.2') - -id_md5 = univ.ObjectIdentifier('1.2.840.113549.2.5') - -id_sha1 = univ.ObjectIdentifier('1.3.14.3.2.26') - -id_sha224 = univ.ObjectIdentifier('2.16.840.1.101.3.4.2.4') - -id_sha256 = univ.ObjectIdentifier('2.16.840.1.101.3.4.2.1') - -id_sha384 = univ.ObjectIdentifier('2.16.840.1.101.3.4.2.2') - -id_sha512 = univ.ObjectIdentifier('2.16.840.1.101.3.4.2.3') - - -# OID for RSA PK Algorithm and Key - -rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1') - - -# OID for DSA PK Algorithm, Key, and Parameters - -id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1') - - -# OID for Diffie-Hellman PK Algorithm, Key, and Parameters - -dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1') - -# OID for KEA PK Algorithm and Parameters - -id_keyExchangeAlgorithm = univ.ObjectIdentifier('2.16.840.1.101.2.1.1.22') - - -# OIDs for Elliptic Curve Algorithm ID, Key, and Parameters -# Note that ECDSA keys always use this OID - -id_ecPublicKey = univ.ObjectIdentifier('1.2.840.10045.2.1') - -id_ecDH = univ.ObjectIdentifier('1.3.132.1.12') - -id_ecMQV = univ.ObjectIdentifier('1.3.132.1.13') - - -# OIDs for RSA Signature Algorithms - -md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2') - -md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4') - -sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5') - - -# OIDs for DSA Signature Algorithms - -id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3') - -id_dsa_with_sha224 = univ.ObjectIdentifier('2.16.840.1.101.3.4.3.1') - -id_dsa_with_sha256 = univ.ObjectIdentifier('2.16.840.1.101.3.4.3.2') - - -# OIDs for ECDSA Signature Algorithms - -ecdsa_with_SHA1 = univ.ObjectIdentifier('1.2.840.10045.4.1') - -ecdsa_with_SHA224 = univ.ObjectIdentifier('1.2.840.10045.4.3.1') - -ecdsa_with_SHA256 = univ.ObjectIdentifier('1.2.840.10045.4.3.2') - -ecdsa_with_SHA384 = univ.ObjectIdentifier('1.2.840.10045.4.3.3') - -ecdsa_with_SHA512 = univ.ObjectIdentifier('1.2.840.10045.4.3.4') - - -# OIDs for Named Elliptic Curves - -secp192r1 = univ.ObjectIdentifier('1.2.840.10045.3.1.1') - -sect163k1 = univ.ObjectIdentifier('1.3.132.0.1') - -sect163r2 = univ.ObjectIdentifier('1.3.132.0.15') - -secp224r1 = univ.ObjectIdentifier('1.3.132.0.33') - -sect233k1 = univ.ObjectIdentifier('1.3.132.0.26') - -sect233r1 = univ.ObjectIdentifier('1.3.132.0.27') - -secp256r1 = univ.ObjectIdentifier('1.2.840.10045.3.1.7') - -sect283k1 = univ.ObjectIdentifier('1.3.132.0.16') - -sect283r1 = 
univ.ObjectIdentifier('1.3.132.0.17') - -secp384r1 = univ.ObjectIdentifier('1.3.132.0.34') - -sect409k1 = univ.ObjectIdentifier('1.3.132.0.36') - -sect409r1 = univ.ObjectIdentifier('1.3.132.0.37') - -secp521r1 = univ.ObjectIdentifier('1.3.132.0.35') - -sect571k1 = univ.ObjectIdentifier('1.3.132.0.38') - -sect571r1 = univ.ObjectIdentifier('1.3.132.0.39') - - -# Map of Algorithm Identifier OIDs to Parameters -# The algorithm is not included if the parameters MUST be absent - -_algorithmIdentifierMapUpdate = { - rsaEncryption: univ.Null(), - md2WithRSAEncryption: univ.Null(), - md5WithRSAEncryption: univ.Null(), - sha1WithRSAEncryption: univ.Null(), - id_dsa: DSS_Parms(), - dhpublicnumber: DomainParameters(), - id_keyExchangeAlgorithm: KEA_Parms_Id(), - id_ecPublicKey: ECParameters(), - id_ecDH: ECParameters(), - id_ecMQV: ECParameters(), -} - - -# Add these Algorithm Identifier map entries to the ones in rfc5280.py - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5649.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5649.py deleted file mode 100644 index 84809eeb188d..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5649.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# AES Key Wrap with Padding -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5649.txt - -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -class AlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -id_aes128_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.5') - -id_aes192_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.25') - -id_aes256_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.45') - - -id_aes128_wrap_pad = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.8') - -id_aes192_wrap_pad = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.28') - -id_aes256_wrap_pad = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.48') diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5751.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5751.py deleted file mode 100644 index 7e200012c6bd..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5751.py +++ /dev/null @@ -1,124 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# S/MIME Version 3.2 Message Specification -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5751.txt - -from pyasn1.type import namedtype -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc8018 - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - return univ.ObjectIdentifier(output) - - -# Imports from RFC 5652 and RFC 8018 - -IssuerAndSerialNumber = rfc5652.IssuerAndSerialNumber - -RecipientKeyIdentifier = rfc5652.RecipientKeyIdentifier - -SubjectKeyIdentifier = rfc5652.SubjectKeyIdentifier - -rc2CBC = rfc8018.rc2CBC - - -# S/MIME Capabilities Attribute - -smimeCapabilities = univ.ObjectIdentifier('1.2.840.113549.1.9.15') - - -smimeCapabilityMap = { } - - -class SMIMECapability(univ.Sequence): - pass - -SMIMECapability.componentType = namedtype.NamedTypes( - namedtype.NamedType('capabilityID', univ.ObjectIdentifier()), - namedtype.OptionalNamedType('parameters', univ.Any(), - openType=opentype.OpenType('capabilityID', smimeCapabilityMap)) -) - - -class SMIMECapabilities(univ.SequenceOf): - pass - -SMIMECapabilities.componentType = SMIMECapability() - - -class SMIMECapabilitiesParametersForRC2CBC(univ.Integer): - # which carries the RC2 Key Length (number of bits) - pass - - -# S/MIME Encryption Key Preference Attribute - -id_smime = univ.ObjectIdentifier('1.2.840.113549.1.9.16') - -id_aa = _OID(id_smime, 2) - -id_aa_encrypKeyPref = _OID(id_aa, 11) - - -class SMIMEEncryptionKeyPreference(univ.Choice): - pass - -SMIMEEncryptionKeyPreference.componentType = namedtype.NamedTypes( - namedtype.NamedType('issuerAndSerialNumber', - IssuerAndSerialNumber().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('receipentKeyId', - # Yes, 'receipentKeyId' is spelled incorrectly, but kept - # this way for alignment with the ASN.1 module in the RFC. - RecipientKeyIdentifier().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('subjectAltKeyIdentifier', - SubjectKeyIdentifier().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -# The Prefer Binary Inside SMIMECapabilities attribute - -id_cap = _OID(id_smime, 11) - -id_cap_preferBinaryInside = _OID(id_cap, 1) - - -# CMS Attribute Map - -_cmsAttributesMapUpdate = { - smimeCapabilities: SMIMECapabilities(), - id_aa_encrypKeyPref: SMIMEEncryptionKeyPreference(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# SMIMECapabilities Attribute Map -# -# Do not include OIDs in the dictionary when the parameters are absent. - -_smimeCapabilityMapUpdate = { - rc2CBC: SMIMECapabilitiesParametersForRC2CBC(), -} - -smimeCapabilityMap.update(_smimeCapabilityMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5755.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5755.py deleted file mode 100644 index 14f56fc60005..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5755.py +++ /dev/null @@ -1,398 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# An Internet Attribute Certificate Profile for Authorization -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5755.txt -# https://www.rfc-editor.org/rfc/rfc5912.txt (see Section 13) -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - -MAX = float('inf') - -# Map for Security Category type to value - -securityCategoryMap = { } - - -# Imports from RFC 5652 - -ContentInfo = rfc5652.ContentInfo - - -# Imports from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -Attribute = rfc5280.Attribute - -AuthorityInfoAccessSyntax = rfc5280.AuthorityInfoAccessSyntax - -AuthorityKeyIdentifier = rfc5280.AuthorityKeyIdentifier - -CertificateSerialNumber = rfc5280.CertificateSerialNumber - -CRLDistributionPoints = rfc5280.CRLDistributionPoints - -Extensions = rfc5280.Extensions - -Extension = rfc5280.Extension - -GeneralNames = rfc5280.GeneralNames - -GeneralName = rfc5280.GeneralName - -UniqueIdentifier = rfc5280.UniqueIdentifier - - -# Object Identifier arcs - -id_pkix = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, )) - -id_pe = id_pkix + (1, ) - -id_kp = id_pkix + (3, ) - -id_aca = id_pkix + (10, ) - -id_ad = id_pkix + (48, ) - -id_at = univ.ObjectIdentifier((2, 5, 4, )) - -id_ce = univ.ObjectIdentifier((2, 5, 29, )) - - -# Attribute Certificate - -class AttCertVersion(univ.Integer): - namedValues = namedval.NamedValues( - ('v2', 1) - ) - - -class IssuerSerial(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('issuer', GeneralNames()), - namedtype.NamedType('serial', CertificateSerialNumber()), - namedtype.OptionalNamedType('issuerUID', UniqueIdentifier()) - ) - - -class ObjectDigestInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('digestedObjectType', - univ.Enumerated(namedValues=namedval.NamedValues( - ('publicKey', 0), - ('publicKeyCert', 1), - ('otherObjectTypes', 2)))), - namedtype.OptionalNamedType('otherObjectTypeID', - univ.ObjectIdentifier()), - namedtype.NamedType('digestAlgorithm', - AlgorithmIdentifier()), - namedtype.NamedType('objectDigest', - univ.BitString()) - ) - - -class Holder(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('baseCertificateID', - IssuerSerial().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('entityName', - GeneralNames().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('objectDigestInfo', - ObjectDigestInfo().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 2))) -) - - -class V2Form(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('issuerName', - GeneralNames()), - namedtype.OptionalNamedType('baseCertificateID', - IssuerSerial().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('objectDigestInfo', - ObjectDigestInfo().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 1))) - ) - - -class AttCertIssuer(univ.Choice): - componentType = namedtype.NamedTypes( - 
namedtype.NamedType('v1Form', GeneralNames()), - namedtype.NamedType('v2Form', V2Form().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 0))) - ) - - -class AttCertValidityPeriod(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('notBeforeTime', useful.GeneralizedTime()), - namedtype.NamedType('notAfterTime', useful.GeneralizedTime()) - ) - - -class AttributeCertificateInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('version', - AttCertVersion()), - namedtype.NamedType('holder', - Holder()), - namedtype.NamedType('issuer', - AttCertIssuer()), - namedtype.NamedType('signature', - AlgorithmIdentifier()), - namedtype.NamedType('serialNumber', - CertificateSerialNumber()), - namedtype.NamedType('attrCertValidityPeriod', - AttCertValidityPeriod()), - namedtype.NamedType('attributes', - univ.SequenceOf(componentType=Attribute())), - namedtype.OptionalNamedType('issuerUniqueID', - UniqueIdentifier()), - namedtype.OptionalNamedType('extensions', - Extensions()) - ) - - -class AttributeCertificate(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('acinfo', AttributeCertificateInfo()), - namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('signatureValue', univ.BitString()) - ) - - -# Attribute Certificate Extensions - -id_pe_ac_auditIdentity = id_pe + (4, ) - -id_ce_noRevAvail = id_ce + (56, ) - -id_ce_targetInformation = id_ce + (55, ) - - -class TargetCert(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('targetCertificate', IssuerSerial()), - namedtype.OptionalNamedType('targetName', GeneralName()), - namedtype.OptionalNamedType('certDigestInfo', ObjectDigestInfo()) - ) - - -class Target(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('targetName', - GeneralName().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('targetGroup', - GeneralName().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('targetCert', - TargetCert().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 2))) - ) - - -class Targets(univ.SequenceOf): - componentType = Target() - - -id_pe_ac_proxying = id_pe + (10, ) - - -class ProxyInfo(univ.SequenceOf): - componentType = Targets() - - -id_pe_aaControls = id_pe + (6, ) - - -class AttrSpec(univ.SequenceOf): - componentType = univ.ObjectIdentifier() - - -class AAControls(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('pathLenConstraint', - univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), - namedtype.OptionalNamedType('permittedAttrs', - AttrSpec().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('excludedAttrs', - AttrSpec().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.DefaultedNamedType('permitUnSpecified', - univ.Boolean().subtype(value=1)) - ) - - -# Attribute Certificate Attributes - -id_aca_authenticationInfo = id_aca + (1, ) - - -id_aca_accessIdentity = id_aca + (2, ) - - -class SvceAuthInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('service', GeneralName()), - namedtype.NamedType('ident', GeneralName()), - namedtype.OptionalNamedType('authInfo', univ.OctetString()) - ) - - -id_aca_chargingIdentity = id_aca + (3, ) - - 
-id_aca_group = id_aca + (4, ) - - -class IetfAttrSyntax(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('policyAuthority', - GeneralNames().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('values', univ.SequenceOf( - componentType=univ.Choice(componentType=namedtype.NamedTypes( - namedtype.NamedType('octets', univ.OctetString()), - namedtype.NamedType('oid', univ.ObjectIdentifier()), - namedtype.NamedType('string', char.UTF8String()) - )) - )) - ) - - -id_at_role = id_at + (72,) - - -class RoleSyntax(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('roleAuthority', - GeneralNames().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('roleName', - GeneralName().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))) - ) - - -class ClassList(univ.BitString): - namedValues = namedval.NamedValues( - ('unmarked', 0), - ('unclassified', 1), - ('restricted', 2), - ('confidential', 3), - ('secret', 4), - ('topSecret', 5) - ) - - -class SecurityCategory(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('type', - univ.ObjectIdentifier().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('value', - univ.Any().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1)), - openType=opentype.OpenType('type', securityCategoryMap)) - ) - - -id_at_clearance = univ.ObjectIdentifier((2, 5, 4, 55, )) - - -class Clearance(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('policyId', - univ.ObjectIdentifier()), - namedtype.DefaultedNamedType('classList', - ClassList().subtype(value='unclassified')), - namedtype.OptionalNamedType('securityCategories', - univ.SetOf(componentType=SecurityCategory())) - ) - - -id_at_clearance_rfc3281 = univ.ObjectIdentifier((2, 5, 1, 5, 55, )) - - -class Clearance_rfc3281(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('policyId', - univ.ObjectIdentifier().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.DefaultedNamedType('classList', - ClassList().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1)).subtype( - value='unclassified')), - namedtype.OptionalNamedType('securityCategories', - univ.SetOf(componentType=SecurityCategory()).subtype( - implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2))) - ) - - -id_aca_encAttrs = id_aca + (6, ) - - -class ACClearAttrs(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('acIssuer', GeneralName()), - namedtype.NamedType('acSerial', univ.Integer()), - namedtype.NamedType('attrs', univ.SequenceOf(componentType=Attribute())) - ) - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_ac_auditIdentity: univ.OctetString(), - id_ce_noRevAvail: univ.Null(), - id_ce_targetInformation: Targets(), - id_pe_ac_proxying: ProxyInfo(), - id_pe_aaControls: AAControls(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) - - -# Map of AttributeType OIDs to AttributeValue added to the -# ones that are in rfc5280.py - -_certificateAttributesMapUpdate = { - id_aca_authenticationInfo: SvceAuthInfo(), - id_aca_accessIdentity: SvceAuthInfo(), - id_aca_chargingIdentity: IetfAttrSyntax(), - 
id_aca_group: IetfAttrSyntax(), - id_at_role: RoleSyntax(), - id_at_clearance: Clearance(), - id_at_clearance_rfc3281: Clearance_rfc3281(), - id_aca_encAttrs: ContentInfo(), -} - -rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5913.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5913.py deleted file mode 100644 index 0bd065330d5c..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5913.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Authority Clearance Constraints Certificate Extension -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5913.txt -# https://www.rfc-editor.org/errata/eid5890 -# - -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5755 - -MAX = float('inf') - - -# Authority Clearance Constraints Certificate Extension - -id_pe_clearanceConstraints = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.21') - -id_pe_authorityClearanceConstraints = id_pe_clearanceConstraints - - -class AuthorityClearanceConstraints(univ.SequenceOf): - componentType = rfc5755.Clearance() - subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_clearanceConstraints: AuthorityClearanceConstraints(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5914.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5914.py deleted file mode 100644 index d125ea2a65f3..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5914.py +++ /dev/null @@ -1,119 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Trust Anchor Format -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5914.txt - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -MAX = float('inf') - -Certificate = rfc5280.Certificate - -Name = rfc5280.Name - -Extensions = rfc5280.Extensions - -SubjectPublicKeyInfo = rfc5280.SubjectPublicKeyInfo - -TBSCertificate = rfc5280.TBSCertificate - -CertificatePolicies = rfc5280.CertificatePolicies - -KeyIdentifier = rfc5280.KeyIdentifier - -NameConstraints = rfc5280.NameConstraints - - -class CertPolicyFlags(univ.BitString): - pass - -CertPolicyFlags.namedValues = namedval.NamedValues( - ('inhibitPolicyMapping', 0), - ('requireExplicitPolicy', 1), - ('inhibitAnyPolicy', 2) -) - - -class CertPathControls(univ.Sequence): - pass - -CertPathControls.componentType = namedtype.NamedTypes( - namedtype.NamedType('taName', Name()), - namedtype.OptionalNamedType('certificate', Certificate().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('policySet', CertificatePolicies().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('policyFlags', CertPolicyFlags().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.OptionalNamedType('nameConstr', NameConstraints().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) -) - - -class TrustAnchorTitle(char.UTF8String): - pass - -TrustAnchorTitle.subtypeSpec = constraint.ValueSizeConstraint(1, 64) - - -class TrustAnchorInfoVersion(univ.Integer): - pass - -TrustAnchorInfoVersion.namedValues = namedval.NamedValues( - ('v1', 1) -) - - -class TrustAnchorInfo(univ.Sequence): - pass - -TrustAnchorInfo.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', TrustAnchorInfoVersion().subtype(value='v1')), - namedtype.NamedType('pubKey', SubjectPublicKeyInfo()), - namedtype.NamedType('keyId', KeyIdentifier()), - namedtype.OptionalNamedType('taTitle', TrustAnchorTitle()), - namedtype.OptionalNamedType('certPath', CertPathControls()), - namedtype.OptionalNamedType('exts', Extensions().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('taTitleLangTag', char.UTF8String().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -class TrustAnchorChoice(univ.Choice): - pass - -TrustAnchorChoice.componentType = namedtype.NamedTypes( - namedtype.NamedType('certificate', Certificate()), - namedtype.NamedType('tbsCert', TBSCertificate().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('taInfo', TrustAnchorInfo().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) -) - - -id_ct_trustAnchorList = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.34') - -class TrustAnchorList(univ.SequenceOf): - pass - -TrustAnchorList.componentType = TrustAnchorChoice() -TrustAnchorList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) 
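Every pyasn1_modules file removed in this series follows the same shape: declare the RFC's object identifiers, declare the matching pyasn1 types, and register the pair in one of the shared open-type maps (rfc5280.certificateAttributesMap, rfc5652.cmsAttributesMap, rfc5280.algorithmIdentifierMap, and so on) so a decoder can resolve ANY-typed values. A minimal sketch of that pattern, written against the PyPI pyasn1 and pyasn1-modules packages rather than this vendored copy; the OID and ExampleAttrValue class are made up purely for illustration:

from pyasn1.type import char, univ
from pyasn1_modules import rfc5280

# Made-up attribute OID and value type, standing in for what each of the
# removed RFC modules defines for its own attributes or extensions.
id_example_attr = univ.ObjectIdentifier('1.3.6.1.4.1.99999.1')


class ExampleAttrValue(char.UTF8String):
    pass


# Registering the pair is what lets a decoder resolve the open type, e.g.
#   der_decode(substrate, asn1Spec=rfc5280.Attribute(), decodeOpenTypes=True)
# with a sufficiently recent pyasn1.
rfc5280.certificateAttributesMap.update({
    id_example_attr: ExampleAttrValue(),
})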
diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5915.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5915.py deleted file mode 100644 index 82ff4a338bc1..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5915.py +++ /dev/null @@ -1,32 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Elliptic Curve Private Key -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5915.txt - -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5480 - - -class ECPrivateKey(univ.Sequence): - pass - -ECPrivateKey.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer( - namedValues=namedval.NamedValues(('ecPrivkeyVer1', 1)))), - namedtype.NamedType('privateKey', univ.OctetString()), - namedtype.OptionalNamedType('parameters', rfc5480.ECParameters().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('publicKey', univ.BitString().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5916.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5916.py deleted file mode 100644 index ac23c86b79a8..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5916.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Device Owner Attribute -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5916.txt -# - -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -# Device Owner Attribute - -id_deviceOwner = univ.ObjectIdentifier((2, 16, 840, 1, 101, 2, 1, 5, 69)) - -at_deviceOwner = rfc5280.Attribute() -at_deviceOwner['type'] = id_deviceOwner -at_deviceOwner['values'][0] = univ.ObjectIdentifier() - - -# Add to the map of Attribute Type OIDs to Attributes in rfc5280.py. - -_certificateAttributesMapUpdate = { - id_deviceOwner: univ.ObjectIdentifier(), -} - -rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5917.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5917.py deleted file mode 100644 index ed9af987db5e..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5917.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Clearance Sponsor Attribute -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5917.txt -# https://www.rfc-editor.org/errata/eid4558 -# https://www.rfc-editor.org/errata/eid5883 -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -# DirectoryString is the same as RFC 5280, except for two things: -# 1. the length is limited to 64; -# 2. 
only the 'utf8String' choice remains because the ASN.1 -# specification says: ( WITH COMPONENTS { utf8String PRESENT } ) - -class DirectoryString(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('utf8String', char.UTF8String().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 64))), - ) - - -# Clearance Sponsor Attribute - -id_clearanceSponsor = univ.ObjectIdentifier((2, 16, 840, 1, 101, 2, 1, 5, 68)) - -ub_clearance_sponsor = univ.Integer(64) - - -at_clearanceSponsor = rfc5280.Attribute() -at_clearanceSponsor['type'] = id_clearanceSponsor -at_clearanceSponsor['values'][0] = DirectoryString() - - -# Add to the map of Attribute Type OIDs to Attributes in rfc5280.py. - -_certificateAttributesMapUpdate = { - id_clearanceSponsor: DirectoryString(), -} - -rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5924.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5924.py deleted file mode 100644 index 4358e4f52970..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5924.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Extended Key Usage (EKU) for Session Initiation Protocol (SIP) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5924.txt -# - -from pyasn1.type import univ - -id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3') - -id_kp_sipDomain = id_kp + (20, ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5934.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5934.py deleted file mode 100644 index e3ad247aa070..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5934.py +++ /dev/null @@ -1,786 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Trust Anchor Format -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5934.txt - -from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful - -from pyasn1_modules import rfc2985 -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc5914 - -MAX = float('inf') - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - return univ.ObjectIdentifier(output) - - -# Imports from RFC 2985 - -SingleAttribute = rfc2985.SingleAttribute - - -# Imports from RFC5914 - -CertPathControls = rfc5914.CertPathControls - -TrustAnchorChoice = rfc5914.TrustAnchorChoice - -TrustAnchorTitle = rfc5914.TrustAnchorTitle - - -# Imports from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -AnotherName = rfc5280.AnotherName - -Attribute = rfc5280.Attribute - -Certificate = rfc5280.Certificate - -CertificateSerialNumber = rfc5280.CertificateSerialNumber - -Extension = rfc5280.Extension - -Extensions = rfc5280.Extensions - -KeyIdentifier = rfc5280.KeyIdentifier - -Name = rfc5280.Name - -SubjectPublicKeyInfo = rfc5280.SubjectPublicKeyInfo - -TBSCertificate = rfc5280.TBSCertificate - -Validity = rfc5280.Validity - - -# Object Identifier Arc for TAMP Message Content Types - -id_tamp = univ.ObjectIdentifier('2.16.840.1.101.2.1.2.77') - - -# TAMP Status Query Message - -id_ct_TAMP_statusQuery = _OID(id_tamp, 1) - - -class TAMPVersion(univ.Integer): - pass - -TAMPVersion.namedValues = namedval.NamedValues( - ('v1', 1), - ('v2', 2) -) - - -class TerseOrVerbose(univ.Enumerated): - pass - -TerseOrVerbose.namedValues = namedval.NamedValues( - ('terse', 1), - ('verbose', 2) -) - - -class HardwareSerialEntry(univ.Choice): - pass - -HardwareSerialEntry.componentType = namedtype.NamedTypes( - namedtype.NamedType('all', univ.Null()), - namedtype.NamedType('single', univ.OctetString()), - namedtype.NamedType('block', univ.Sequence(componentType=namedtype.NamedTypes( - namedtype.NamedType('low', univ.OctetString()), - namedtype.NamedType('high', univ.OctetString()) - )) - ) -) - - -class HardwareModules(univ.Sequence): - pass - -HardwareModules.componentType = namedtype.NamedTypes( - namedtype.NamedType('hwType', univ.ObjectIdentifier()), - namedtype.NamedType('hwSerialEntries', univ.SequenceOf( - componentType=HardwareSerialEntry()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -class HardwareModuleIdentifierList(univ.SequenceOf): - pass - -HardwareModuleIdentifierList.componentType = HardwareModules() -HardwareModuleIdentifierList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class Community(univ.ObjectIdentifier): - pass - - -class CommunityIdentifierList(univ.SequenceOf): - pass - -CommunityIdentifierList.componentType = Community() -CommunityIdentifierList.subtypeSpec=constraint.ValueSizeConstraint(0, MAX) - - -class TargetIdentifier(univ.Choice): - pass - -TargetIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('hwModules', HardwareModuleIdentifierList().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('communities', CommunityIdentifierList().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.NamedType('allModules', univ.Null().subtype( - 
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.NamedType('uri', char.IA5String().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), - namedtype.NamedType('otherName', AnotherName().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))) -) - - -class SeqNumber(univ.Integer): - pass - -SeqNumber.subtypeSpec = constraint.ValueRangeConstraint(0, 9223372036854775807) - - -class TAMPMsgRef(univ.Sequence): - pass - -TAMPMsgRef.componentType = namedtype.NamedTypes( - namedtype.NamedType('target', TargetIdentifier()), - namedtype.NamedType('seqNum', SeqNumber()) -) - - -class TAMPStatusQuery(univ.Sequence): - pass - -TAMPStatusQuery.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', TAMPVersion().subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.DefaultedNamedType('terse', TerseOrVerbose().subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1)).subtype(value='verbose')), - namedtype.NamedType('query', TAMPMsgRef()) -) - - -tamp_status_query = rfc5652.ContentInfo() -tamp_status_query['contentType'] = id_ct_TAMP_statusQuery -tamp_status_query['content'] = TAMPStatusQuery() - - -# TAMP Status Response Message - -id_ct_TAMP_statusResponse = _OID(id_tamp, 2) - - -class KeyIdentifiers(univ.SequenceOf): - pass - -KeyIdentifiers.componentType = KeyIdentifier() -KeyIdentifiers.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class TrustAnchorChoiceList(univ.SequenceOf): - pass - -TrustAnchorChoiceList.componentType = TrustAnchorChoice() -TrustAnchorChoiceList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class TAMPSequenceNumber(univ.Sequence): - pass - -TAMPSequenceNumber.componentType = namedtype.NamedTypes( - namedtype.NamedType('keyId', KeyIdentifier()), - namedtype.NamedType('seqNumber', SeqNumber()) -) - - -class TAMPSequenceNumbers(univ.SequenceOf): - pass - -TAMPSequenceNumbers.componentType = TAMPSequenceNumber() -TAMPSequenceNumbers.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class TerseStatusResponse(univ.Sequence): - pass - -TerseStatusResponse.componentType = namedtype.NamedTypes( - namedtype.NamedType('taKeyIds', KeyIdentifiers()), - namedtype.OptionalNamedType('communities', CommunityIdentifierList()) -) - - -class VerboseStatusResponse(univ.Sequence): - pass - -VerboseStatusResponse.componentType = namedtype.NamedTypes( - namedtype.NamedType('taInfo', TrustAnchorChoiceList()), - namedtype.OptionalNamedType('continPubKeyDecryptAlg', - AlgorithmIdentifier().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('communities', - CommunityIdentifierList().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('tampSeqNumbers', - TAMPSequenceNumbers().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -class StatusResponse(univ.Choice): - pass - -StatusResponse.componentType = namedtype.NamedTypes( - namedtype.NamedType('terseResponse', TerseStatusResponse().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('verboseResponse', VerboseStatusResponse().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) -) - - -class TAMPStatusResponse(univ.Sequence): - pass - -TAMPStatusResponse.componentType = namedtype.NamedTypes( - 
namedtype.DefaultedNamedType('version', TAMPVersion().subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('query', TAMPMsgRef()), - namedtype.NamedType('response', StatusResponse()), - namedtype.DefaultedNamedType('usesApex', univ.Boolean().subtype(value=1)) -) - - -tamp_status_response = rfc5652.ContentInfo() -tamp_status_response['contentType'] = id_ct_TAMP_statusResponse -tamp_status_response['content'] = TAMPStatusResponse() - - -# Trust Anchor Update Message - -id_ct_TAMP_update = _OID(id_tamp, 3) - - -class TBSCertificateChangeInfo(univ.Sequence): - pass - -TBSCertificateChangeInfo.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('serialNumber', CertificateSerialNumber()), - namedtype.OptionalNamedType('signature', AlgorithmIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('issuer', Name().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('validity', Validity().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.OptionalNamedType('subject', Name().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), - namedtype.OptionalNamedType('exts', Extensions().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 5))) -) - - -class TrustAnchorChangeInfo(univ.Sequence): - pass - -TrustAnchorChangeInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('pubKey', SubjectPublicKeyInfo()), - namedtype.OptionalNamedType('keyId', KeyIdentifier()), - namedtype.OptionalNamedType('taTitle', TrustAnchorTitle()), - namedtype.OptionalNamedType('certPath', CertPathControls()), - namedtype.OptionalNamedType('exts', Extensions().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class TrustAnchorChangeInfoChoice(univ.Choice): - pass - -TrustAnchorChangeInfoChoice.componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsCertChange', TBSCertificateChangeInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('taChange', TrustAnchorChangeInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) -) - - -class TrustAnchorUpdate(univ.Choice): - pass - -TrustAnchorUpdate.componentType = namedtype.NamedTypes( - namedtype.NamedType('add', TrustAnchorChoice().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('remove', SubjectPublicKeyInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.NamedType('change', TrustAnchorChangeInfoChoice().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))) -) - - -class TAMPUpdate(univ.Sequence): - pass - -TAMPUpdate.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.DefaultedNamedType('terse', - TerseOrVerbose().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1)).subtype(value='verbose')), - namedtype.NamedType('msgRef', TAMPMsgRef()), - namedtype.NamedType('updates', - 
univ.SequenceOf(componentType=TrustAnchorUpdate()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), - namedtype.OptionalNamedType('tampSeqNumbers', - TAMPSequenceNumbers().subtype(implicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -tamp_update = rfc5652.ContentInfo() -tamp_update['contentType'] = id_ct_TAMP_update -tamp_update['content'] = TAMPUpdate() - - -# Trust Anchor Update Confirm Message - -id_ct_TAMP_updateConfirm = _OID(id_tamp, 4) - - -class StatusCode(univ.Enumerated): - pass - -StatusCode.namedValues = namedval.NamedValues( - ('success', 0), - ('decodeFailure', 1), - ('badContentInfo', 2), - ('badSignedData', 3), - ('badEncapContent', 4), - ('badCertificate', 5), - ('badSignerInfo', 6), - ('badSignedAttrs', 7), - ('badUnsignedAttrs', 8), - ('missingContent', 9), - ('noTrustAnchor', 10), - ('notAuthorized', 11), - ('badDigestAlgorithm', 12), - ('badSignatureAlgorithm', 13), - ('unsupportedKeySize', 14), - ('unsupportedParameters', 15), - ('signatureFailure', 16), - ('insufficientMemory', 17), - ('unsupportedTAMPMsgType', 18), - ('apexTAMPAnchor', 19), - ('improperTAAddition', 20), - ('seqNumFailure', 21), - ('contingencyPublicKeyDecrypt', 22), - ('incorrectTarget', 23), - ('communityUpdateFailed', 24), - ('trustAnchorNotFound', 25), - ('unsupportedTAAlgorithm', 26), - ('unsupportedTAKeySize', 27), - ('unsupportedContinPubKeyDecryptAlg', 28), - ('missingSignature', 29), - ('resourcesBusy', 30), - ('versionNumberMismatch', 31), - ('missingPolicySet', 32), - ('revokedCertificate', 33), - ('unsupportedTrustAnchorFormat', 34), - ('improperTAChange', 35), - ('malformed', 36), - ('cmsError', 37), - ('unsupportedTargetIdentifier', 38), - ('other', 127) -) - - -class StatusCodeList(univ.SequenceOf): - pass - -StatusCodeList.componentType = StatusCode() -StatusCodeList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class TerseUpdateConfirm(StatusCodeList): - pass - - -class VerboseUpdateConfirm(univ.Sequence): - pass - -VerboseUpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.NamedType('status', StatusCodeList()), - namedtype.NamedType('taInfo', TrustAnchorChoiceList()), - namedtype.OptionalNamedType('tampSeqNumbers', TAMPSequenceNumbers()), - namedtype.DefaultedNamedType('usesApex', univ.Boolean().subtype(value=1)) -) - - -class UpdateConfirm(univ.Choice): - pass - -UpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.NamedType('terseConfirm', TerseUpdateConfirm().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('verboseConfirm', VerboseUpdateConfirm().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) -) - - -class TAMPUpdateConfirm(univ.Sequence): - pass - -TAMPUpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', TAMPVersion().subtype( - implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('update', TAMPMsgRef()), - namedtype.NamedType('confirm', UpdateConfirm()) -) - - -tamp_update_confirm = rfc5652.ContentInfo() -tamp_update_confirm['contentType'] = id_ct_TAMP_updateConfirm -tamp_update_confirm['content'] = TAMPUpdateConfirm() - - -# Apex Trust Anchor Update Message - -id_ct_TAMP_apexUpdate = _OID(id_tamp, 5) - - -class TAMPApexUpdate(univ.Sequence): - pass - -TAMPApexUpdate.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - 
tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.DefaultedNamedType('terse', - TerseOrVerbose().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1)).subtype(value='verbose')), - namedtype.NamedType('msgRef', TAMPMsgRef()), - namedtype.NamedType('clearTrustAnchors', univ.Boolean()), - namedtype.NamedType('clearCommunities', univ.Boolean()), - namedtype.OptionalNamedType('seqNumber', SeqNumber()), - namedtype.NamedType('apexTA', TrustAnchorChoice()) -) - - -tamp_apex_update = rfc5652.ContentInfo() -tamp_apex_update['contentType'] = id_ct_TAMP_apexUpdate -tamp_apex_update['content'] = TAMPApexUpdate() - - -# Apex Trust Anchor Update Confirm Message - -id_ct_TAMP_apexUpdateConfirm = _OID(id_tamp, 6) - - -class TerseApexUpdateConfirm(StatusCode): - pass - - -class VerboseApexUpdateConfirm(univ.Sequence): - pass - -VerboseApexUpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.NamedType('status', StatusCode()), - namedtype.NamedType('taInfo', TrustAnchorChoiceList()), - namedtype.OptionalNamedType('communities', - CommunityIdentifierList().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('tampSeqNumbers', - TAMPSequenceNumbers().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1))) -) - - -class ApexUpdateConfirm(univ.Choice): - pass - -ApexUpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.NamedType('terseApexConfirm', - TerseApexUpdateConfirm().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0))), - namedtype.NamedType('verboseApexConfirm', - VerboseApexUpdateConfirm().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 1))) -) - - -class TAMPApexUpdateConfirm(univ.Sequence): - pass - -TAMPApexUpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('apexReplace', TAMPMsgRef()), - namedtype.NamedType('apexConfirm', ApexUpdateConfirm()) -) - - -tamp_apex_update_confirm = rfc5652.ContentInfo() -tamp_apex_update_confirm['contentType'] = id_ct_TAMP_apexUpdateConfirm -tamp_apex_update_confirm['content'] = TAMPApexUpdateConfirm() - - -# Community Update Message - -id_ct_TAMP_communityUpdate = _OID(id_tamp, 7) - - -class CommunityUpdates(univ.Sequence): - pass - -CommunityUpdates.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('remove', - CommunityIdentifierList().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('add', - CommunityIdentifierList().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 2))) -) - - -class TAMPCommunityUpdate(univ.Sequence): - pass - -TAMPCommunityUpdate.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.DefaultedNamedType('terse', - TerseOrVerbose().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1)).subtype(value='verbose')), - namedtype.NamedType('msgRef', TAMPMsgRef()), - namedtype.NamedType('updates', CommunityUpdates()) -) - - -tamp_community_update = rfc5652.ContentInfo() -tamp_community_update['contentType'] = id_ct_TAMP_communityUpdate -tamp_community_update['content'] = TAMPCommunityUpdate() - - -# Community Update Confirm Message - 
-id_ct_TAMP_communityUpdateConfirm = _OID(id_tamp, 8) - - -class TerseCommunityConfirm(StatusCode): - pass - - -class VerboseCommunityConfirm(univ.Sequence): - pass - -VerboseCommunityConfirm.componentType = namedtype.NamedTypes( - namedtype.NamedType('status', StatusCode()), - namedtype.OptionalNamedType('communities', CommunityIdentifierList()) -) - - -class CommunityConfirm(univ.Choice): - pass - -CommunityConfirm.componentType = namedtype.NamedTypes( - namedtype.NamedType('terseCommConfirm', - TerseCommunityConfirm().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0))), - namedtype.NamedType('verboseCommConfirm', - VerboseCommunityConfirm().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 1))) -) - - -class TAMPCommunityUpdateConfirm(univ.Sequence): - pass - -TAMPCommunityUpdateConfirm.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('update', TAMPMsgRef()), - namedtype.NamedType('commConfirm', CommunityConfirm()) -) - - -tamp_community_update_confirm = rfc5652.ContentInfo() -tamp_community_update_confirm['contentType'] = id_ct_TAMP_communityUpdateConfirm -tamp_community_update_confirm['content'] = TAMPCommunityUpdateConfirm() - - -# Sequence Number Adjust Message - -id_ct_TAMP_seqNumAdjust = _OID(id_tamp, 10) - - - -class SequenceNumberAdjust(univ.Sequence): - pass - -SequenceNumberAdjust.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('msgRef', TAMPMsgRef()) -) - - -tamp_sequence_number_adjust = rfc5652.ContentInfo() -tamp_sequence_number_adjust['contentType'] = id_ct_TAMP_seqNumAdjust -tamp_sequence_number_adjust['content'] = SequenceNumberAdjust() - - -# Sequence Number Adjust Confirm Message - -id_ct_TAMP_seqNumAdjustConfirm = _OID(id_tamp, 11) - - -class SequenceNumberAdjustConfirm(univ.Sequence): - pass - -SequenceNumberAdjustConfirm.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('adjust', TAMPMsgRef()), - namedtype.NamedType('status', StatusCode()) -) - - -tamp_sequence_number_adjust_confirm = rfc5652.ContentInfo() -tamp_sequence_number_adjust_confirm['contentType'] = id_ct_TAMP_seqNumAdjustConfirm -tamp_sequence_number_adjust_confirm['content'] = SequenceNumberAdjustConfirm() - - -# TAMP Error Message - -id_ct_TAMP_error = _OID(id_tamp, 9) - - -class TAMPError(univ.Sequence): - pass - -TAMPError.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - TAMPVersion().subtype(implicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0)).subtype(value='v2')), - namedtype.NamedType('msgType', univ.ObjectIdentifier()), - namedtype.NamedType('status', StatusCode()), - namedtype.OptionalNamedType('msgRef', TAMPMsgRef()) -) - - -tamp_error = rfc5652.ContentInfo() -tamp_error['contentType'] = id_ct_TAMP_error -tamp_error['content'] = TAMPError() - - -# Object Identifier Arc for Attributes - -id_attributes = univ.ObjectIdentifier('2.16.840.1.101.2.1.5') - - -# contingency-public-key-decrypt-key unsigned attribute - -id_aa_TAMP_contingencyPublicKeyDecryptKey = _OID(id_attributes, 63) - - -class 
PlaintextSymmetricKey(univ.OctetString): - pass - - -contingency_public_key_decrypt_key = Attribute() -contingency_public_key_decrypt_key['type'] = id_aa_TAMP_contingencyPublicKeyDecryptKey -contingency_public_key_decrypt_key['values'][0] = PlaintextSymmetricKey() - - -# id-pe-wrappedApexContinKey extension - -id_pe_wrappedApexContinKey =univ.ObjectIdentifier('1.3.6.1.5.5.7.1.20') - - -class ApexContingencyKey(univ.Sequence): - pass - -ApexContingencyKey.componentType = namedtype.NamedTypes( - namedtype.NamedType('wrapAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('wrappedContinPubKey', univ.OctetString()) -) - - -wrappedApexContinKey = Extension() -wrappedApexContinKey['extnID'] = id_pe_wrappedApexContinKey -wrappedApexContinKey['critical'] = 0 -wrappedApexContinKey['extnValue'] = univ.OctetString() - - -# Add to the map of CMS Content Type OIDs to Content Types in -# rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_TAMP_statusQuery: TAMPStatusQuery(), - id_ct_TAMP_statusResponse: TAMPStatusResponse(), - id_ct_TAMP_update: TAMPUpdate(), - id_ct_TAMP_updateConfirm: TAMPUpdateConfirm(), - id_ct_TAMP_apexUpdate: TAMPApexUpdate(), - id_ct_TAMP_apexUpdateConfirm: TAMPApexUpdateConfirm(), - id_ct_TAMP_communityUpdate: TAMPCommunityUpdate(), - id_ct_TAMP_communityUpdateConfirm: TAMPCommunityUpdateConfirm(), - id_ct_TAMP_seqNumAdjust: SequenceNumberAdjust(), - id_ct_TAMP_seqNumAdjustConfirm: SequenceNumberAdjustConfirm(), - id_ct_TAMP_error: TAMPError(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) - - -# Add to the map of CMS Attribute OIDs to Attribute Values in -# rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_TAMP_contingencyPublicKeyDecryptKey: PlaintextSymmetricKey(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# Add to the map of Certificate Extension OIDs to Extensions in -# rfc5280.py - -_certificateExtensionsMap = { - id_pe_wrappedApexContinKey: ApexContingencyKey(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMap) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5940.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5940.py deleted file mode 100644 index e105923358b7..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5940.py +++ /dev/null @@ -1,59 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add map for use with opentypes. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Additional CMS Revocation Information Choices -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5940.txt -# - -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc2560 -from pyasn1_modules import rfc5652 - - -# RevocationInfoChoice for OCSP response: -# The OID is included in otherRevInfoFormat, and -# signed OCSPResponse is included in otherRevInfo - -id_ri_ocsp_response = univ.ObjectIdentifier('1.3.6.1.5.5.7.16.2') - -OCSPResponse = rfc2560.OCSPResponse - - -# RevocationInfoChoice for SCVP request/response: -# The OID is included in otherRevInfoFormat, and -# SCVPReqRes is included in otherRevInfo - -id_ri_scvp = univ.ObjectIdentifier('1.3.6.1.5.5.7.16.4') - -ContentInfo = rfc5652.ContentInfo - -class SCVPReqRes(univ.Sequence): - pass - -SCVPReqRes.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('request', - ContentInfo().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('response', ContentInfo()) -) - - -# Map of Revocation Info Format OIDs to Revocation Info Format -# is added to the ones that are in rfc5652.py - -_otherRevInfoFormatMapUpdate = { - id_ri_ocsp_response: OCSPResponse(), - id_ri_scvp: SCVPReqRes(), -} - -rfc5652.otherRevInfoFormatMap.update(_otherRevInfoFormatMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5958.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5958.py deleted file mode 100644 index 1aaa9286aded..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5958.py +++ /dev/null @@ -1,98 +0,0 @@ -# -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. -# Modified by Russ Housley to add a map for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Asymmetric Key Packages, which is essentially version 2 of -# the PrivateKeyInfo structure in PKCS#8 in RFC 5208 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5958.txt - -from pyasn1.type import univ, constraint, namedtype, namedval, tag - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - - -MAX = float('inf') - - -class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -class PrivateKeyAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -class EncryptedData(univ.OctetString): - pass - - -class EncryptedPrivateKeyInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('encryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()), - namedtype.NamedType('encryptedData', EncryptedData()) - ) - - -class Version(univ.Integer): - namedValues = namedval.NamedValues(('v1', 0), ('v2', 1)) - - -class PrivateKey(univ.OctetString): - pass - - -class Attributes(univ.SetOf): - componentType = rfc5652.Attribute() - - -class PublicKey(univ.BitString): - pass - - -# OneAsymmetricKey is essentially version 2 of PrivateKeyInfo. -# If publicKey is present, then the version must be v2; -# otherwise, the version should be v1. 
- -class OneAsymmetricKey(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('version', Version()), - namedtype.NamedType('privateKeyAlgorithm', PrivateKeyAlgorithmIdentifier()), - namedtype.NamedType('privateKey', PrivateKey()), - namedtype.OptionalNamedType('attributes', Attributes().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.OptionalNamedType('publicKey', PublicKey().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) - ) - - -class PrivateKeyInfo(OneAsymmetricKey): - pass - - -# The CMS AsymmetricKeyPackage Content Type - -id_ct_KP_aKeyPackage = univ.ObjectIdentifier('2.16.840.1.101.2.1.2.78.5') - -class AsymmetricKeyPackage(univ.SequenceOf): - pass - -AsymmetricKeyPackage.componentType = OneAsymmetricKey() -AsymmetricKeyPackage.sizeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_KP_aKeyPackage: AsymmetricKeyPackage(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5990.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc5990.py deleted file mode 100644 index 281316fb81a0..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc5990.py +++ /dev/null @@ -1,237 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Use of the RSA-KEM Key Transport Algorithm in the CMS -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc5990.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - return univ.ObjectIdentifier(output) - - -# Imports from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - - -# Useful types and definitions - -class NullParms(univ.Null): - pass - - -# Object identifier arcs - -is18033_2 = _OID(1, 0, 18033, 2) - -nistAlgorithm = _OID(2, 16, 840, 1, 101, 3, 4) - -pkcs_1 = _OID(1, 2, 840, 113549, 1, 1) - -x9_44 = _OID(1, 3, 133, 16, 840, 9, 44) - -x9_44_components = _OID(x9_44, 1) - - -# Types for algorithm identifiers - -class Camellia_KeyWrappingScheme(AlgorithmIdentifier): - pass - -class DataEncapsulationMechanism(AlgorithmIdentifier): - pass - -class KDF2_HashFunction(AlgorithmIdentifier): - pass - -class KDF3_HashFunction(AlgorithmIdentifier): - pass - -class KeyDerivationFunction(AlgorithmIdentifier): - pass - -class KeyEncapsulationMechanism(AlgorithmIdentifier): - pass - -class X9_SymmetricKeyWrappingScheme(AlgorithmIdentifier): - pass - - -# RSA-KEM Key Transport Algorithm - -id_rsa_kem = _OID(1, 2, 840, 113549, 1, 9, 16, 3, 14) - - -class GenericHybridParameters(univ.Sequence): - pass - -GenericHybridParameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('kem', KeyEncapsulationMechanism()), - namedtype.NamedType('dem', DataEncapsulationMechanism()) -) - - -rsa_kem = AlgorithmIdentifier() -rsa_kem['algorithm'] = id_rsa_kem -rsa_kem['parameters'] = GenericHybridParameters() - - -# KEM-RSA Key Encapsulation Mechanism - -id_kem_rsa = 
_OID(is18033_2, 2, 4) - - -class KeyLength(univ.Integer): - pass - -KeyLength.subtypeSpec = constraint.ValueRangeConstraint(1, MAX) - - -class RsaKemParameters(univ.Sequence): - pass - -RsaKemParameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('keyDerivationFunction', KeyDerivationFunction()), - namedtype.NamedType('keyLength', KeyLength()) -) - - -kem_rsa = AlgorithmIdentifier() -kem_rsa['algorithm'] = id_kem_rsa -kem_rsa['parameters'] = RsaKemParameters() - - -# Key Derivation Functions - -id_kdf_kdf2 = _OID(x9_44_components, 1) - -id_kdf_kdf3 = _OID(x9_44_components, 2) - - -kdf2 = AlgorithmIdentifier() -kdf2['algorithm'] = id_kdf_kdf2 -kdf2['parameters'] = KDF2_HashFunction() - -kdf3 = AlgorithmIdentifier() -kdf3['algorithm'] = id_kdf_kdf3 -kdf3['parameters'] = KDF3_HashFunction() - - -# Hash Functions - -id_sha1 = _OID(1, 3, 14, 3, 2, 26) - -id_sha224 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 4) - -id_sha256 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 1) - -id_sha384 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 2) - -id_sha512 = _OID(2, 16, 840, 1, 101, 3, 4, 2, 3) - - -sha1 = AlgorithmIdentifier() -sha1['algorithm'] = id_sha1 -sha1['parameters'] = univ.Null("") - -sha224 = AlgorithmIdentifier() -sha224['algorithm'] = id_sha224 -sha224['parameters'] = univ.Null("") - -sha256 = AlgorithmIdentifier() -sha256['algorithm'] = id_sha256 -sha256['parameters'] = univ.Null("") - -sha384 = AlgorithmIdentifier() -sha384['algorithm'] = id_sha384 -sha384['parameters'] = univ.Null("") - -sha512 = AlgorithmIdentifier() -sha512['algorithm'] = id_sha512 -sha512['parameters'] = univ.Null("") - - -# Symmetric Key-Wrapping Schemes - -id_aes128_Wrap = _OID(nistAlgorithm, 1, 5) - -id_aes192_Wrap = _OID(nistAlgorithm, 1, 25) - -id_aes256_Wrap = _OID(nistAlgorithm, 1, 45) - -id_alg_CMS3DESwrap = _OID(1, 2, 840, 113549, 1, 9, 16, 3, 6) - -id_camellia128_Wrap = _OID(1, 2, 392, 200011, 61, 1, 1, 3, 2) - -id_camellia192_Wrap = _OID(1, 2, 392, 200011, 61, 1, 1, 3, 3) - -id_camellia256_Wrap = _OID(1, 2, 392, 200011, 61, 1, 1, 3, 4) - - -aes128_Wrap = AlgorithmIdentifier() -aes128_Wrap['algorithm'] = id_aes128_Wrap -# aes128_Wrap['parameters'] are absent - -aes192_Wrap = AlgorithmIdentifier() -aes192_Wrap['algorithm'] = id_aes128_Wrap -# aes192_Wrap['parameters'] are absent - -aes256_Wrap = AlgorithmIdentifier() -aes256_Wrap['algorithm'] = id_sha256 -# aes256_Wrap['parameters'] are absent - -tdes_Wrap = AlgorithmIdentifier() -tdes_Wrap['algorithm'] = id_alg_CMS3DESwrap -tdes_Wrap['parameters'] = univ.Null("") - -camellia128_Wrap = AlgorithmIdentifier() -camellia128_Wrap['algorithm'] = id_camellia128_Wrap -# camellia128_Wrap['parameters'] are absent - -camellia192_Wrap = AlgorithmIdentifier() -camellia192_Wrap['algorithm'] = id_camellia192_Wrap -# camellia192_Wrap['parameters'] are absent - -camellia256_Wrap = AlgorithmIdentifier() -camellia256_Wrap['algorithm'] = id_camellia256_Wrap -# camellia256_Wrap['parameters'] are absent - - -# Update the Algorithm Identifier map in rfc5280.py. -# Note that the ones that must not have parameters are not added to the map. 
- -_algorithmIdentifierMapUpdate = { - id_rsa_kem: GenericHybridParameters(), - id_kem_rsa: RsaKemParameters(), - id_kdf_kdf2: KDF2_HashFunction(), - id_kdf_kdf3: KDF3_HashFunction(), - id_sha1: univ.Null(), - id_sha224: univ.Null(), - id_sha256: univ.Null(), - id_sha384: univ.Null(), - id_sha512: univ.Null(), - id_alg_CMS3DESwrap: univ.Null(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6010.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6010.py deleted file mode 100644 index 250e207ba4e8..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6010.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add maps for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Certificate Extension for CMS Content Constraints (CCC) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6010.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -AttributeType = rfc5280.AttributeType - -AttributeValue = rfc5280.AttributeValue - - -id_ct_anyContentType = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.0') - - -class AttrConstraint(univ.Sequence): - pass - -AttrConstraint.componentType = namedtype.NamedTypes( - namedtype.NamedType('attrType', AttributeType()), - namedtype.NamedType('attrValues', univ.SetOf( - componentType=AttributeValue()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -class AttrConstraintList(univ.SequenceOf): - pass - -AttrConstraintList.componentType = AttrConstraint() -AttrConstraintList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class ContentTypeGeneration(univ.Enumerated): - pass - -ContentTypeGeneration.namedValues = namedval.NamedValues( - ('canSource', 0), - ('cannotSource', 1) -) - - -class ContentTypeConstraint(univ.Sequence): - pass - -ContentTypeConstraint.componentType = namedtype.NamedTypes( - namedtype.NamedType('contentType', univ.ObjectIdentifier()), - namedtype.DefaultedNamedType('canSource', ContentTypeGeneration().subtype(value='canSource')), - namedtype.OptionalNamedType('attrConstraints', AttrConstraintList()) -) - - -# CMS Content Constraints (CCC) Extension and Object Identifier - -id_pe_cmsContentConstraints = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.18') - -class CMSContentConstraints(univ.SequenceOf): - pass - -CMSContentConstraints.componentType = ContentTypeConstraint() -CMSContentConstraints.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Map of Certificate Extension OIDs to Extensions -# To be added to the ones that are in rfc5280.py - -_certificateExtensionsMap = { - id_pe_cmsContentConstraints: CMSContentConstraints(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMap) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6019.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6019.py deleted file mode 100644 index c6872c76699c..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6019.py +++ /dev/null @@ -1,45 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. -# Modified by Russ Housley to add a map for use with opentypes. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# BinaryTime: An Alternate Format for Representing Date and Time -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6019.txt - -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -# BinaryTime: Represent date and time as an integer - -class BinaryTime(univ.Integer): - pass - -BinaryTime.subtypeSpec = constraint.ValueRangeConstraint(0, MAX) - - -# CMS Attribute for representing signing time in BinaryTime - -id_aa_binarySigningTime = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.46') - -class BinarySigningTime(BinaryTime): - pass - - -# Map of Attribute Type OIDs to Attributes ia added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_binarySigningTime: BinarySigningTime(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6031.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6031.py deleted file mode 100644 index 6e1bb2261d57..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6031.py +++ /dev/null @@ -1,469 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# CMS Symmetric Key Package Content Type -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6031.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc6019 - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - return univ.ObjectIdentifier(output) - - -MAX = float('inf') - -id_pskc = univ.ObjectIdentifier('1.2.840.113549.1.9.16.12') - - -# Symmetric Key Package Attributes - -id_pskc_manufacturer = _OID(id_pskc, 1) - -class at_pskc_manufacturer(char.UTF8String): - pass - - -id_pskc_serialNo = _OID(id_pskc, 2) - -class at_pskc_serialNo(char.UTF8String): - pass - - -id_pskc_model = _OID(id_pskc, 3) - -class at_pskc_model(char.UTF8String): - pass - - -id_pskc_issueNo = _OID(id_pskc, 4) - -class at_pskc_issueNo(char.UTF8String): - pass - - -id_pskc_deviceBinding = _OID(id_pskc, 5) - -class at_pskc_deviceBinding(char.UTF8String): - pass - - -id_pskc_deviceStartDate = _OID(id_pskc, 6) - -class at_pskc_deviceStartDate(useful.GeneralizedTime): - pass - - -id_pskc_deviceExpiryDate = _OID(id_pskc, 7) - -class at_pskc_deviceExpiryDate(useful.GeneralizedTime): - pass - - -id_pskc_moduleId = _OID(id_pskc, 8) - -class at_pskc_moduleId(char.UTF8String): - pass - - -id_pskc_deviceUserId = _OID(id_pskc, 26) - -class at_pskc_deviceUserId(char.UTF8String): - pass - - -# Symmetric Key Attributes - -id_pskc_keyId = _OID(id_pskc, 9) - -class at_pskc_keyUserId(char.UTF8String): - pass - - -id_pskc_algorithm = _OID(id_pskc, 10) - -class at_pskc_algorithm(char.UTF8String): - pass - - -id_pskc_issuer = _OID(id_pskc, 11) - -class at_pskc_issuer(char.UTF8String): - pass - - -id_pskc_keyProfileId = _OID(id_pskc, 12) - -class at_pskc_keyProfileId(char.UTF8String): - 
pass - - -id_pskc_keyReference = _OID(id_pskc, 13) - -class at_pskc_keyReference(char.UTF8String): - pass - - -id_pskc_friendlyName = _OID(id_pskc, 14) - -class FriendlyName(univ.Sequence): - pass - -FriendlyName.componentType = namedtype.NamedTypes( - namedtype.NamedType('friendlyName', char.UTF8String()), - namedtype.OptionalNamedType('friendlyNameLangTag', char.UTF8String()) -) - -class at_pskc_friendlyName(FriendlyName): - pass - - -id_pskc_algorithmParameters = _OID(id_pskc, 15) - -class Encoding(char.UTF8String): - pass - -Encoding.namedValues = namedval.NamedValues( - ('dec', "DECIMAL"), - ('hex', "HEXADECIMAL"), - ('alpha', "ALPHANUMERIC"), - ('b64', "BASE64"), - ('bin', "BINARY") -) - -Encoding.subtypeSpec = constraint.SingleValueConstraint( - "DECIMAL", "HEXADECIMAL", "ALPHANUMERIC", "BASE64", "BINARY" ) - -class ChallengeFormat(univ.Sequence): - pass - -ChallengeFormat.componentType = namedtype.NamedTypes( - namedtype.NamedType('encoding', Encoding()), - namedtype.DefaultedNamedType('checkDigit', - univ.Boolean().subtype(value=0)), - namedtype.NamedType('min', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), - namedtype.NamedType('max', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX))) -) - -class ResponseFormat(univ.Sequence): - pass - -ResponseFormat.componentType = namedtype.NamedTypes( - namedtype.NamedType('encoding', Encoding()), - namedtype.NamedType('length', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), - namedtype.DefaultedNamedType('checkDigit', - univ.Boolean().subtype(value=0)) -) - -class PSKCAlgorithmParameters(univ.Choice): - pass - -PSKCAlgorithmParameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('suite', char.UTF8String()), - namedtype.NamedType('challengeFormat', ChallengeFormat().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('responseFormat', ResponseFormat().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) -) - -class at_pskc_algorithmParameters(PSKCAlgorithmParameters): - pass - - -id_pskc_counter = _OID(id_pskc, 16) - -class at_pskc_counter(univ.Integer): - pass - -at_pskc_counter.subtypeSpec = constraint.ValueRangeConstraint(0, MAX) - - -id_pskc_time = _OID(id_pskc, 17) - -class at_pskc_time(rfc6019.BinaryTime): - pass - - -id_pskc_timeInterval = _OID(id_pskc, 18) - -class at_pskc_timeInterval(univ.Integer): - pass - -at_pskc_timeInterval.subtypeSpec = constraint.ValueRangeConstraint(0, MAX) - - -id_pskc_timeDrift = _OID(id_pskc, 19) - -class at_pskc_timeDrift(univ.Integer): - pass - -at_pskc_timeDrift.subtypeSpec = constraint.ValueRangeConstraint(0, MAX) - - -id_pskc_valueMAC = _OID(id_pskc, 20) - -class ValueMac(univ.Sequence): - pass - -ValueMac.componentType = namedtype.NamedTypes( - namedtype.NamedType('macAlgorithm', char.UTF8String()), - namedtype.NamedType('mac', char.UTF8String()) -) - -class at_pskc_valueMAC(ValueMac): - pass - - -id_pskc_keyUserId = _OID(id_pskc, 27) - -class at_pskc_keyId(char.UTF8String): - pass - - -id_pskc_keyStartDate = _OID(id_pskc, 21) - -class at_pskc_keyStartDate(useful.GeneralizedTime): - pass - - -id_pskc_keyExpiryDate = _OID(id_pskc, 22) - -class at_pskc_keyExpiryDate(useful.GeneralizedTime): - pass - - -id_pskc_numberOfTransactions = _OID(id_pskc, 23) - -class at_pskc_numberOfTransactions(univ.Integer): - pass - -at_pskc_numberOfTransactions.subtypeSpec = constraint.ValueRangeConstraint(0, MAX) - - 
-id_pskc_keyUsages = _OID(id_pskc, 24) - -class PSKCKeyUsage(char.UTF8String): - pass - -PSKCKeyUsage.namedValues = namedval.NamedValues( - ('otp', "OTP"), - ('cr', "CR"), - ('encrypt', "Encrypt"), - ('integrity', "Integrity"), - ('verify', "Verify"), - ('unlock', "Unlock"), - ('decrypt', "Decrypt"), - ('keywrap', "KeyWrap"), - ('unwrap', "Unwrap"), - ('derive', "Derive"), - ('generate', "Generate") -) - -PSKCKeyUsage.subtypeSpec = constraint.SingleValueConstraint( - "OTP", "CR", "Encrypt", "Integrity", "Verify", "Unlock", - "Decrypt", "KeyWrap", "Unwrap", "Derive", "Generate" ) - -class PSKCKeyUsages(univ.SequenceOf): - pass - -PSKCKeyUsages.componentType = PSKCKeyUsage() - -class at_pskc_keyUsage(PSKCKeyUsages): - pass - - -id_pskc_pinPolicy = _OID(id_pskc, 25) - -class PINUsageMode(char.UTF8String): - pass - -PINUsageMode.namedValues = namedval.NamedValues( - ("local", "Local"), - ("prepend", "Prepend"), - ("append", "Append"), - ("algorithmic", "Algorithmic") -) - -PINUsageMode.subtypeSpec = constraint.SingleValueConstraint( - "Local", "Prepend", "Append", "Algorithmic" ) - -class PINPolicy(univ.Sequence): - pass - -PINPolicy.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('pinKeyId', char.UTF8String().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('pinUsageMode', PINUsageMode().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('maxFailedAttempts', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.OptionalNamedType('minLength', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.OptionalNamedType('maxLength', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), - namedtype.OptionalNamedType('pinEncoding', Encoding().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))) -) - -class at_pskc_pinPolicy(PINPolicy): - pass - - -# Map of Symmetric Key Package Attribute OIDs to Attributes - -sKeyPkgAttributesMap = { - id_pskc_manufacturer: at_pskc_manufacturer(), - id_pskc_serialNo: at_pskc_serialNo(), - id_pskc_model: at_pskc_model(), - id_pskc_issueNo: at_pskc_issueNo(), - id_pskc_deviceBinding: at_pskc_deviceBinding(), - id_pskc_deviceStartDate: at_pskc_deviceStartDate(), - id_pskc_deviceExpiryDate: at_pskc_deviceExpiryDate(), - id_pskc_moduleId: at_pskc_moduleId(), - id_pskc_deviceUserId: at_pskc_deviceUserId(), -} - - -# Map of Symmetric Key Attribute OIDs to Attributes - -sKeyAttributesMap = { - id_pskc_keyId: at_pskc_keyId(), - id_pskc_algorithm: at_pskc_algorithm(), - id_pskc_issuer: at_pskc_issuer(), - id_pskc_keyProfileId: at_pskc_keyProfileId(), - id_pskc_keyReference: at_pskc_keyReference(), - id_pskc_friendlyName: at_pskc_friendlyName(), - id_pskc_algorithmParameters: at_pskc_algorithmParameters(), - id_pskc_counter: at_pskc_counter(), - id_pskc_time: at_pskc_time(), - id_pskc_timeInterval: at_pskc_timeInterval(), - id_pskc_timeDrift: at_pskc_timeDrift(), - id_pskc_valueMAC: at_pskc_valueMAC(), - id_pskc_keyUserId: at_pskc_keyUserId(), - id_pskc_keyStartDate: at_pskc_keyStartDate(), - id_pskc_keyExpiryDate: at_pskc_keyExpiryDate(), - id_pskc_numberOfTransactions: 
at_pskc_numberOfTransactions(), - id_pskc_keyUsages: at_pskc_keyUsage(), - id_pskc_pinPolicy: at_pskc_pinPolicy(), -} - - -# This definition replaces Attribute() from rfc5652.py; it is the same except -# that opentype is added with sKeyPkgAttributesMap and sKeyAttributesMap - -class AttributeType(univ.ObjectIdentifier): - pass - - -class AttributeValue(univ.Any): - pass - - -class SKeyAttribute(univ.Sequence): - pass - -SKeyAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('attrType', AttributeType()), - namedtype.NamedType('attrValues', - univ.SetOf(componentType=AttributeValue()), - openType=opentype.OpenType('attrType', sKeyAttributesMap) - ) -) - - -class SKeyPkgAttribute(univ.Sequence): - pass - -SKeyPkgAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('attrType', AttributeType()), - namedtype.NamedType('attrValues', - univ.SetOf(componentType=AttributeValue()), - openType=opentype.OpenType('attrType', sKeyPkgAttributesMap) - ) -) - - -# Symmetric Key Package Content Type - -id_ct_KP_sKeyPackage = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.25') - - -class KeyPkgVersion(univ.Integer): - pass - -KeyPkgVersion.namedValues = namedval.NamedValues( - ('v1', 1) -) - - -class OneSymmetricKey(univ.Sequence): - pass - -OneSymmetricKey.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('sKeyAttrs', - univ.SequenceOf(componentType=SKeyAttribute()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), - namedtype.OptionalNamedType('sKey', univ.OctetString()) -) - -OneSymmetricKey.sizeSpec = univ.Sequence.sizeSpec + constraint.ValueSizeConstraint(1, 2) - - -class SymmetricKeys(univ.SequenceOf): - pass - -SymmetricKeys.componentType = OneSymmetricKey() -SymmetricKeys.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -class SymmetricKeyPackage(univ.Sequence): - pass - -SymmetricKeyPackage.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', KeyPkgVersion().subtype(value='v1')), - namedtype.OptionalNamedType('sKeyPkgAttrs', - univ.SequenceOf(componentType=SKeyPkgAttribute()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX), - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('sKeys', SymmetricKeys()) -) - - -# Map of Content Type OIDs to Content Types are -# added to the ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_KP_sKeyPackage: SymmetricKeyPackage(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6032.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6032.py deleted file mode 100644 index 563639a8d66e..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6032.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# CMS Encrypted Key Package Content Type -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6032.txt -# - -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc5083 - - -# Content Decryption Key Identifier attribute - -id_aa_KP_contentDecryptKeyID = univ.ObjectIdentifier('2.16.840.1.101.2.1.5.66') - -class ContentDecryptKeyID(univ.OctetString): - pass - -aa_content_decrypt_key_identifier = rfc5652.Attribute() -aa_content_decrypt_key_identifier['attrType'] = id_aa_KP_contentDecryptKeyID -aa_content_decrypt_key_identifier['attrValues'][0] = ContentDecryptKeyID() - - -# Encrypted Key Package Content Type - -id_ct_KP_encryptedKeyPkg = univ.ObjectIdentifier('2.16.840.1.101.2.1.2.78.2') - -class EncryptedKeyPackage(univ.Choice): - pass - -EncryptedKeyPackage.componentType = namedtype.NamedTypes( - namedtype.NamedType('encrypted', rfc5652.EncryptedData()), - namedtype.NamedType('enveloped', rfc5652.EnvelopedData().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('authEnveloped', rfc5083.AuthEnvelopedData().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -# Map of Attribute Type OIDs to Attributes are -# added to the ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_KP_contentDecryptKeyID: ContentDecryptKeyID(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# Map of Content Type OIDs to Content Types are -# added to the ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_KP_encryptedKeyPkg: EncryptedKeyPackage(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6120.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6120.py deleted file mode 100644 index ab256203a08e..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6120.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Extensible Messaging and Presence Protocol (XMPP) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6120.txt -# - -from pyasn1.type import char -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# XmppAddr Identifier Type as specified in Section 13.7.1.4. of RFC 6120 - -id_pkix = rfc5280.id_pkix - -id_on = id_pkix + (8, ) - -id_on_xmppAddr = id_on + (5, ) - - -class XmppAddr(char.UTF8String): - pass - - -# Map of Other Name OIDs to Other Name is added to the -# ones that are in rfc5280.py - -_anotherNameMapUpdate = { - id_on_xmppAddr: XmppAddr(), -} - -rfc5280.anotherNameMap.update(_anotherNameMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6170.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6170.py deleted file mode 100644 index e2876167b705..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6170.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Certificate Image in the Internet X.509 Public Key Infrastructure -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6170.txt -# - -from pyasn1.type import univ - -id_logo_certImage = univ.ObjectIdentifier('1.3.6.1.5.5.7.20.3') diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6187.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6187.py deleted file mode 100644 index 4be005471623..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6187.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# X.509v3 Certificates for Secure Shell Authentication -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6187.txt -# - -from pyasn1.type import univ - -id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') - -id_kp = id_pkix + (3, ) - -id_kp_secureShellClient = id_kp + (21, ) -id_kp_secureShellServer = id_kp + (22, ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6210.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6210.py deleted file mode 100644 index 28587b9e70b0..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6210.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Experiment for Hash Functions with Parameters in the CMS -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6210.txt -# - -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -id_alg_MD5_XOR_EXPERIMENT = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.13') - - -class MD5_XOR_EXPERIMENT(univ.OctetString): - pass - -MD5_XOR_EXPERIMENT.subtypeSpec = constraint.ValueSizeConstraint(64, 64) - - -mda_xor_md5_EXPERIMENT = rfc5280.AlgorithmIdentifier() -mda_xor_md5_EXPERIMENT['algorithm'] = id_alg_MD5_XOR_EXPERIMENT -mda_xor_md5_EXPERIMENT['parameters'] = MD5_XOR_EXPERIMENT() - - -# Map of Algorithm Identifier OIDs to Parameters added to the -# ones that are in rfc5280.py. - -_algorithmIdentifierMapUpdate = { - id_alg_MD5_XOR_EXPERIMENT: MD5_XOR_EXPERIMENT(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6211.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6211.py deleted file mode 100644 index abd7a8688d0c..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6211.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# CMS Algorithm Identifier Protection Attribute -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6211.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - - -# Imports from RFC 5652 - -DigestAlgorithmIdentifier = rfc5652.DigestAlgorithmIdentifier - -MessageAuthenticationCodeAlgorithm = rfc5652.MessageAuthenticationCodeAlgorithm - -SignatureAlgorithmIdentifier = rfc5652.SignatureAlgorithmIdentifier - - -# CMS Algorithm Protection attribute - -id_aa_cmsAlgorithmProtect = univ.ObjectIdentifier('1.2.840.113549.1.9.52') - - -class CMSAlgorithmProtection(univ.Sequence): - pass - -CMSAlgorithmProtection.componentType = namedtype.NamedTypes( - namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()), - namedtype.OptionalNamedType('signatureAlgorithm', - SignatureAlgorithmIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('macAlgorithm', - MessageAuthenticationCodeAlgorithm().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) -) - -CMSAlgorithmProtection.subtypeSpec = constraint.ConstraintsUnion( - constraint.WithComponentsConstraint( - ('signatureAlgorithm', constraint.ComponentPresentConstraint()), - ('macAlgorithm', constraint.ComponentAbsentConstraint())), - constraint.WithComponentsConstraint( - ('signatureAlgorithm', constraint.ComponentAbsentConstraint()), - ('macAlgorithm', constraint.ComponentPresentConstraint())) -) - - -aa_cmsAlgorithmProtection = rfc5652.Attribute() -aa_cmsAlgorithmProtection['attrType'] = id_aa_cmsAlgorithmProtect -aa_cmsAlgorithmProtection['attrValues'][0] = CMSAlgorithmProtection() - - -# Map of Attribute Type OIDs to Attributes are -# added to the ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_cmsAlgorithmProtect: CMSAlgorithmProtection(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) \ No newline at end of file diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6402-1.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6402-1.py deleted file mode 100644 index 322e35e0c7d0..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6402-1.py +++ /dev/null @@ -1,627 +0,0 @@ -# coding: utf-8 -# -# This file is part of pyasn1-modules software. -# -# Created by Stanisław Pitucha with asn1ate tool. -# Modified by Russ Housley to add a maps for CMC Control Attributes -# and CMC Content Types for use with opentypes. 
-# -# Copyright (c) 2005-2019, Ilya Etingof -# License: http://snmplabs.com/pyasn1/license.html -# -# Certificate Management over CMS (CMC) Updates -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6402.txt -# -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ -from pyasn1.type import useful - -from pyasn1_modules import rfc4211 -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -def _buildOid(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -cmcControlAttributesMap = { } - - -class ChangeSubjectName(univ.Sequence): - pass - - -ChangeSubjectName.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('subject', rfc5280.Name()), - namedtype.OptionalNamedType('subjectAlt', rfc5280.GeneralNames()) -) - - -class AttributeValue(univ.Any): - pass - - -class CMCStatus(univ.Integer): - pass - - -CMCStatus.namedValues = namedval.NamedValues( - ('success', 0), - ('failed', 2), - ('pending', 3), - ('noSupport', 4), - ('confirmRequired', 5), - ('popRequired', 6), - ('partial', 7) -) - - -class PendInfo(univ.Sequence): - pass - - -PendInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('pendToken', univ.OctetString()), - namedtype.NamedType('pendTime', useful.GeneralizedTime()) -) - -bodyIdMax = univ.Integer(4294967295) - - -class BodyPartID(univ.Integer): - pass - - -BodyPartID.subtypeSpec = constraint.ValueRangeConstraint(0, bodyIdMax) - - -class BodyPartPath(univ.SequenceOf): - pass - - -BodyPartPath.componentType = BodyPartID() -BodyPartPath.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - - -class BodyPartReference(univ.Choice): - pass - - -BodyPartReference.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('bodyPartPath', BodyPartPath()) -) - - -class CMCFailInfo(univ.Integer): - pass - - -CMCFailInfo.namedValues = namedval.NamedValues( - ('badAlg', 0), - ('badMessageCheck', 1), - ('badRequest', 2), - ('badTime', 3), - ('badCertId', 4), - ('unsupportedExt', 5), - ('mustArchiveKeys', 6), - ('badIdentity', 7), - ('popRequired', 8), - ('popFailed', 9), - ('noKeyReuse', 10), - ('internalCAError', 11), - ('tryLater', 12), - ('authDataFail', 13) -) - - -class CMCStatusInfoV2(univ.Sequence): - pass - - -CMCStatusInfoV2.componentType = namedtype.NamedTypes( - namedtype.NamedType('cMCStatus', CMCStatus()), - namedtype.NamedType('bodyList', univ.SequenceOf(componentType=BodyPartReference())), - namedtype.OptionalNamedType('statusString', char.UTF8String()), - namedtype.OptionalNamedType( - 'otherInfo', univ.Choice( - componentType=namedtype.NamedTypes( - namedtype.NamedType('failInfo', CMCFailInfo()), - namedtype.NamedType('pendInfo', PendInfo()), - namedtype.NamedType( - 'extendedFailInfo', univ.Sequence( - componentType=namedtype.NamedTypes( - namedtype.NamedType('failInfoOID', univ.ObjectIdentifier()), - namedtype.NamedType('failInfoValue', AttributeValue())) - ) - ) - ) - ) - ) -) - - -class GetCRL(univ.Sequence): - pass - - -GetCRL.componentType = namedtype.NamedTypes( - namedtype.NamedType('issuerName', rfc5280.Name()), - namedtype.OptionalNamedType('cRLName', rfc5280.GeneralName()), - namedtype.OptionalNamedType('time', 
useful.GeneralizedTime()), - namedtype.OptionalNamedType('reasons', rfc5280.ReasonFlags()) -) - -id_pkix = _buildOid(1, 3, 6, 1, 5, 5, 7) - -id_cmc = _buildOid(id_pkix, 7) - -id_cmc_batchResponses = _buildOid(id_cmc, 29) - -id_cmc_popLinkWitness = _buildOid(id_cmc, 23) - - -class PopLinkWitnessV2(univ.Sequence): - pass - - -PopLinkWitnessV2.componentType = namedtype.NamedTypes( - namedtype.NamedType('keyGenAlgorithm', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('macAlgorithm', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('witness', univ.OctetString()) -) - -id_cmc_popLinkWitnessV2 = _buildOid(id_cmc, 33) - -id_cmc_identityProofV2 = _buildOid(id_cmc, 34) - -id_cmc_revokeRequest = _buildOid(id_cmc, 17) - -id_cmc_recipientNonce = _buildOid(id_cmc, 7) - - -class ControlsProcessed(univ.Sequence): - pass - - -ControlsProcessed.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyList', univ.SequenceOf(componentType=BodyPartReference())) -) - - -class CertificationRequest(univ.Sequence): - pass - - -CertificationRequest.componentType = namedtype.NamedTypes( - namedtype.NamedType( - 'certificationRequestInfo', univ.Sequence( - componentType=namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer()), - namedtype.NamedType('subject', rfc5280.Name()), - namedtype.NamedType( - 'subjectPublicKeyInfo', univ.Sequence( - componentType=namedtype.NamedTypes( - namedtype.NamedType('algorithm', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('subjectPublicKey', univ.BitString()) - ) - ) - ), - namedtype.NamedType( - 'attributes', univ.SetOf( - componentType=rfc5652.Attribute()).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)) - ) - ) - ) - ), - namedtype.NamedType('signatureAlgorithm', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()) -) - - -class TaggedCertificationRequest(univ.Sequence): - pass - - -TaggedCertificationRequest.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('certificationRequest', CertificationRequest()) -) - - -class TaggedRequest(univ.Choice): - pass - - -TaggedRequest.componentType = namedtype.NamedTypes( - namedtype.NamedType('tcr', TaggedCertificationRequest().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('crm', - rfc4211.CertReqMsg().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('orm', univ.Sequence(componentType=namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('requestMessageType', univ.ObjectIdentifier()), - namedtype.NamedType('requestMessageValue', univ.Any()) - )) - .subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) -) - -id_cmc_popLinkRandom = _buildOid(id_cmc, 22) - -id_cmc_statusInfo = _buildOid(id_cmc, 1) - -id_cmc_trustedAnchors = _buildOid(id_cmc, 26) - -id_cmc_transactionId = _buildOid(id_cmc, 5) - -id_cmc_encryptedPOP = _buildOid(id_cmc, 9) - - -class PublishTrustAnchors(univ.Sequence): - pass - - -PublishTrustAnchors.componentType = namedtype.NamedTypes( - namedtype.NamedType('seqNumber', univ.Integer()), - namedtype.NamedType('hashAlgorithm', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('anchorHashes', univ.SequenceOf(componentType=univ.OctetString())) -) - - -class RevokeRequest(univ.Sequence): - pass - - -RevokeRequest.componentType = namedtype.NamedTypes( - namedtype.NamedType('issuerName', 
rfc5280.Name()), - namedtype.NamedType('serialNumber', univ.Integer()), - namedtype.NamedType('reason', rfc5280.CRLReason()), - namedtype.OptionalNamedType('invalidityDate', useful.GeneralizedTime()), - namedtype.OptionalNamedType('passphrase', univ.OctetString()), - namedtype.OptionalNamedType('comment', char.UTF8String()) -) - -id_cmc_senderNonce = _buildOid(id_cmc, 6) - -id_cmc_authData = _buildOid(id_cmc, 27) - - -class TaggedContentInfo(univ.Sequence): - pass - - -TaggedContentInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('contentInfo', rfc5652.ContentInfo()) -) - - -class IdentifyProofV2(univ.Sequence): - pass - - -IdentifyProofV2.componentType = namedtype.NamedTypes( - namedtype.NamedType('proofAlgID', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('macAlgId', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('witness', univ.OctetString()) -) - - -class CMCPublicationInfo(univ.Sequence): - pass - - -CMCPublicationInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlg', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('certHashes', univ.SequenceOf(componentType=univ.OctetString())), - namedtype.NamedType('pubInfo', rfc4211.PKIPublicationInfo()) -) - -id_kp_cmcCA = _buildOid(rfc5280.id_kp, 27) - -id_cmc_confirmCertAcceptance = _buildOid(id_cmc, 24) - -id_cmc_raIdentityWitness = _buildOid(id_cmc, 35) - -id_ExtensionReq = _buildOid(1, 2, 840, 113549, 1, 9, 14) - -id_cct = _buildOid(id_pkix, 12) - -id_cct_PKIData = _buildOid(id_cct, 2) - -id_kp_cmcRA = _buildOid(rfc5280.id_kp, 28) - - -class CMCStatusInfo(univ.Sequence): - pass - - -CMCStatusInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('cMCStatus', CMCStatus()), - namedtype.NamedType('bodyList', univ.SequenceOf(componentType=BodyPartID())), - namedtype.OptionalNamedType('statusString', char.UTF8String()), - namedtype.OptionalNamedType( - 'otherInfo', univ.Choice( - componentType=namedtype.NamedTypes( - namedtype.NamedType('failInfo', CMCFailInfo()), - namedtype.NamedType('pendInfo', PendInfo()) - ) - ) - ) -) - - -class DecryptedPOP(univ.Sequence): - pass - - -DecryptedPOP.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('thePOPAlgID', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('thePOP', univ.OctetString()) -) - -id_cmc_addExtensions = _buildOid(id_cmc, 8) - -id_cmc_modCertTemplate = _buildOid(id_cmc, 31) - - -class TaggedAttribute(univ.Sequence): - pass - - -TaggedAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('attrType', univ.ObjectIdentifier()), - namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue()), - openType=opentype.OpenType('attrType', cmcControlAttributesMap) - ) -) - - -class OtherMsg(univ.Sequence): - pass - - -OtherMsg.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartID', BodyPartID()), - namedtype.NamedType('otherMsgType', univ.ObjectIdentifier()), - namedtype.NamedType('otherMsgValue', univ.Any()) -) - - -class PKIData(univ.Sequence): - pass - - -PKIData.componentType = namedtype.NamedTypes( - namedtype.NamedType('controlSequence', univ.SequenceOf(componentType=TaggedAttribute())), - namedtype.NamedType('reqSequence', univ.SequenceOf(componentType=TaggedRequest())), - namedtype.NamedType('cmsSequence', univ.SequenceOf(componentType=TaggedContentInfo())), - namedtype.NamedType('otherMsgSequence', 
univ.SequenceOf(componentType=OtherMsg())) -) - - -class BodyPartList(univ.SequenceOf): - pass - - -BodyPartList.componentType = BodyPartID() -BodyPartList.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - -id_cmc_responseBody = _buildOid(id_cmc, 37) - - -class AuthPublish(BodyPartID): - pass - - -class CMCUnsignedData(univ.Sequence): - pass - - -CMCUnsignedData.componentType = namedtype.NamedTypes( - namedtype.NamedType('bodyPartPath', BodyPartPath()), - namedtype.NamedType('identifier', univ.ObjectIdentifier()), - namedtype.NamedType('content', univ.Any()) -) - - -class CMCCertId(rfc5652.IssuerAndSerialNumber): - pass - - -class PKIResponse(univ.Sequence): - pass - - -PKIResponse.componentType = namedtype.NamedTypes( - namedtype.NamedType('controlSequence', univ.SequenceOf(componentType=TaggedAttribute())), - namedtype.NamedType('cmsSequence', univ.SequenceOf(componentType=TaggedContentInfo())), - namedtype.NamedType('otherMsgSequence', univ.SequenceOf(componentType=OtherMsg())) -) - - -class ResponseBody(PKIResponse): - pass - - -id_cmc_statusInfoV2 = _buildOid(id_cmc, 25) - -id_cmc_lraPOPWitness = _buildOid(id_cmc, 11) - - -class ModCertTemplate(univ.Sequence): - pass - - -ModCertTemplate.componentType = namedtype.NamedTypes( - namedtype.NamedType('pkiDataReference', BodyPartPath()), - namedtype.NamedType('certReferences', BodyPartList()), - namedtype.DefaultedNamedType('replace', univ.Boolean().subtype(value=1)), - namedtype.NamedType('certTemplate', rfc4211.CertTemplate()) -) - -id_cmc_regInfo = _buildOid(id_cmc, 18) - -id_cmc_identityProof = _buildOid(id_cmc, 3) - - -class ExtensionReq(univ.SequenceOf): - pass - - -ExtensionReq.componentType = rfc5280.Extension() -ExtensionReq.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - -id_kp_cmcArchive = _buildOid(rfc5280.id_kp, 28) - -id_cmc_publishCert = _buildOid(id_cmc, 30) - -id_cmc_dataReturn = _buildOid(id_cmc, 4) - - -class LraPopWitness(univ.Sequence): - pass - - -LraPopWitness.componentType = namedtype.NamedTypes( - namedtype.NamedType('pkiDataBodyid', BodyPartID()), - namedtype.NamedType('bodyIds', univ.SequenceOf(componentType=BodyPartID())) -) - -id_aa = _buildOid(1, 2, 840, 113549, 1, 9, 16, 2) - -id_aa_cmc_unsignedData = _buildOid(id_aa, 34) - -id_cmc_getCert = _buildOid(id_cmc, 15) - -id_cmc_batchRequests = _buildOid(id_cmc, 28) - -id_cmc_decryptedPOP = _buildOid(id_cmc, 10) - -id_cmc_responseInfo = _buildOid(id_cmc, 19) - -id_cmc_changeSubjectName = _buildOid(id_cmc, 36) - - -class GetCert(univ.Sequence): - pass - - -GetCert.componentType = namedtype.NamedTypes( - namedtype.NamedType('issuerName', rfc5280.GeneralName()), - namedtype.NamedType('serialNumber', univ.Integer()) -) - -id_cmc_identification = _buildOid(id_cmc, 2) - -id_cmc_queryPending = _buildOid(id_cmc, 21) - - -class AddExtensions(univ.Sequence): - pass - - -AddExtensions.componentType = namedtype.NamedTypes( - namedtype.NamedType('pkiDataReference', BodyPartID()), - namedtype.NamedType('certReferences', univ.SequenceOf(componentType=BodyPartID())), - namedtype.NamedType('extensions', univ.SequenceOf(componentType=rfc5280.Extension())) -) - - -class EncryptedPOP(univ.Sequence): - pass - - -EncryptedPOP.componentType = namedtype.NamedTypes( - namedtype.NamedType('request', TaggedRequest()), - namedtype.NamedType('cms', rfc5652.ContentInfo()), - namedtype.NamedType('thePOPAlgID', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('witnessAlgID', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('witness', univ.OctetString()) -) - -id_cmc_getCRL = 
_buildOid(id_cmc, 16) - -id_cct_PKIResponse = _buildOid(id_cct, 3) - -id_cmc_controlProcessed = _buildOid(id_cmc, 32) - - -class NoSignatureValue(univ.OctetString): - pass - - -id_ad_cmc = _buildOid(rfc5280.id_ad, 12) - -id_alg_noSignature = _buildOid(id_pkix, 6, 2) - - -# Map of CMC Control OIDs to CMC Control Attributes - -_cmcControlAttributesMapUpdate = { - id_cmc_statusInfo: CMCStatusInfo(), - id_cmc_statusInfoV2: CMCStatusInfoV2(), - id_cmc_identification: char.UTF8String(), - id_cmc_identityProof: univ.OctetString(), - id_cmc_identityProofV2: IdentifyProofV2(), - id_cmc_dataReturn: univ.OctetString(), - id_cmc_transactionId: univ.Integer(), - id_cmc_senderNonce: univ.OctetString(), - id_cmc_recipientNonce: univ.OctetString(), - id_cmc_addExtensions: AddExtensions(), - id_cmc_encryptedPOP: EncryptedPOP(), - id_cmc_decryptedPOP: DecryptedPOP(), - id_cmc_lraPOPWitness: LraPopWitness(), - id_cmc_getCert: GetCert(), - id_cmc_getCRL: GetCRL(), - id_cmc_revokeRequest: RevokeRequest(), - id_cmc_regInfo: univ.OctetString(), - id_cmc_responseInfo: univ.OctetString(), - id_cmc_queryPending: univ.OctetString(), - id_cmc_popLinkRandom: univ.OctetString(), - id_cmc_popLinkWitness: univ.OctetString(), - id_cmc_popLinkWitnessV2: PopLinkWitnessV2(), - id_cmc_confirmCertAcceptance: CMCCertId(), - id_cmc_trustedAnchors: PublishTrustAnchors(), - id_cmc_authData: AuthPublish(), - id_cmc_batchRequests: BodyPartList(), - id_cmc_batchResponses: BodyPartList(), - id_cmc_publishCert: CMCPublicationInfo(), - id_cmc_modCertTemplate: ModCertTemplate(), - id_cmc_controlProcessed: ControlsProcessed(), - id_ExtensionReq: ExtensionReq(), -} - -cmcControlAttributesMap.update(_cmcControlAttributesMapUpdate) - - -# Map of CMC Content Type OIDs to CMC Content Types are added to -# the ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_cct_PKIData: PKIData(), - id_cct_PKIResponse: PKIResponse(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) - diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6482.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6482.py deleted file mode 100644 index d213a46f8de4..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6482.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# RPKI Route Origin Authorizations (ROAs) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6482.txt -# https://www.rfc-editor.org/errata/eid5881 -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -id_ct_routeOriginAuthz = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.24') - - -class ASID(univ.Integer): - pass - - -class IPAddress(univ.BitString): - pass - - -class ROAIPAddress(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('address', IPAddress()), - namedtype.OptionalNamedType('maxLength', univ.Integer()) - ) - - -class ROAIPAddressFamily(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('addressFamily', - univ.OctetString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(2, 3))), - namedtype.NamedType('addresses', - univ.SequenceOf(componentType=ROAIPAddress()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) - ) - - -class RouteOriginAttestation(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - univ.Integer().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)).subtype(value=0)), - namedtype.NamedType('asID', ASID()), - namedtype.NamedType('ipAddrBlocks', - univ.SequenceOf(componentType=ROAIPAddressFamily()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) - ) - - -# Map of Content Type OIDs to Content Types added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_routeOriginAuthz: RouteOriginAttestation(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6486.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6486.py deleted file mode 100644 index 31c936a4f259..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6486.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# RPKI Manifests -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6486.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import useful -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -id_smime = univ.ObjectIdentifier('1.2.840.113549.1.9.16') - -id_ct = id_smime + (1, ) - -id_ct_rpkiManifest = id_ct + (26, ) - - -class FileAndHash(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('file', char.IA5String()), - namedtype.NamedType('hash', univ.BitString()) - ) - - -class Manifest(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', - univ.Integer().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)).subtype(value=0)), - namedtype.NamedType('manifestNumber', - univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(0, MAX))), - namedtype.NamedType('thisUpdate', - useful.GeneralizedTime()), - namedtype.NamedType('nextUpdate', - useful.GeneralizedTime()), - namedtype.NamedType('fileHashAlg', - univ.ObjectIdentifier()), - namedtype.NamedType('fileList', - univ.SequenceOf(componentType=FileAndHash()).subtype( - subtypeSpec=constraint.ValueSizeConstraint(0, MAX))) - ) - - -# Map of Content Type OIDs to Content Types added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_rpkiManifest: Manifest(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6487.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6487.py deleted file mode 100644 index d8c2f87423f9..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6487.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Profile for X.509 PKIX Resource Certificates -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6487.txt -# - -from pyasn1.type import univ - -id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') - -id_ad = id_pkix + (48, ) - -id_ad_rpkiManifest = id_ad + (10, ) -id_ad_signedObject = id_ad + (11, ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6664.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6664.py deleted file mode 100644 index 41629d8d7f85..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6664.py +++ /dev/null @@ -1,147 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with some assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# S/MIME Capabilities for Public Key Definitions -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6664.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5751 -from pyasn1_modules import rfc5480 -from pyasn1_modules import rfc4055 -from pyasn1_modules import rfc3279 - -MAX = float('inf') - - -# Imports from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - - -# Imports from RFC 3279 - -dhpublicnumber = rfc3279.dhpublicnumber - -Dss_Parms = rfc3279.Dss_Parms - -id_dsa = rfc3279.id_dsa - -id_ecPublicKey = rfc3279.id_ecPublicKey - -rsaEncryption = rfc3279.rsaEncryption - - -# Imports from RFC 4055 - -id_mgf1 = rfc4055.id_mgf1 - -id_RSAES_OAEP = rfc4055.id_RSAES_OAEP - -id_RSASSA_PSS = rfc4055.id_RSASSA_PSS - - -# Imports from RFC 5480 - -ECParameters = rfc5480.ECParameters - -id_ecDH = rfc5480.id_ecDH - -id_ecMQV = rfc5480.id_ecMQV - - -# RSA - -class RSAKeySize(univ.Integer): - # suggested values are 1024, 2048, 3072, 4096, 7680, 8192, and 15360; - # however, the integer value is not limited to these suggestions - pass - - -class RSAKeyCapabilities(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('minKeySize', RSAKeySize()), - namedtype.OptionalNamedType('maxKeySize', RSAKeySize()) - ) - - -class RsaSsa_Pss_sig_caps(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlg', AlgorithmIdentifier()), - namedtype.OptionalNamedType('maskAlg', AlgorithmIdentifier()), - namedtype.DefaultedNamedType('trailerField', univ.Integer().subtype(value=1)) - ) - - -# Diffie-Hellman and DSA - -class DSAKeySize(univ.Integer): - subtypeSpec = constraint.SingleValueConstraint(1024, 2048, 3072, 7680, 15360) - - -class DSAKeyCapabilities(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('keySizes', univ.Sequence(componentType=namedtype.NamedTypes( - namedtype.NamedType('minKeySize', - DSAKeySize()), - namedtype.OptionalNamedType('maxKeySize', - DSAKeySize()), - namedtype.OptionalNamedType('maxSizeP', - univ.Integer().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('maxSizeQ', - univ.Integer().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.OptionalNamedType('maxSizeG', - univ.Integer().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 3))) - )).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('keyParams', - Dss_Parms().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatConstructed, 1))) - ) - - -# Elliptic Curve - -class EC_SMimeCaps(univ.SequenceOf): - componentType = ECParameters() - subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Update the SMIMECapabilities Attribute Map in rfc5751.py -# -# The map can either include an entry for scap-sa-rsaSSA-PSS or -# scap-pk-rsaSSA-PSS, but not both. One is associated with the -# public key and the other is associated with the signature -# algorithm; however, they use the same OID. If you need the -# other one in your application, copy the map into a local dict, -# adjust as needed, and pass the local dict to the decoder with -# openTypes=your_local_map. 
- -_smimeCapabilityMapUpdate = { - rsaEncryption: RSAKeyCapabilities(), - id_RSASSA_PSS: RSAKeyCapabilities(), - # id_RSASSA_PSS: RsaSsa_Pss_sig_caps(), - id_RSAES_OAEP: RSAKeyCapabilities(), - id_dsa: DSAKeyCapabilities(), - dhpublicnumber: DSAKeyCapabilities(), - id_ecPublicKey: EC_SMimeCaps(), - id_ecDH: EC_SMimeCaps(), - id_ecMQV: EC_SMimeCaps(), - id_mgf1: AlgorithmIdentifier(), -} - -rfc5751.smimeCapabilityMap.update(_smimeCapabilityMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6955.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6955.py deleted file mode 100644 index 09f2d6562ee6..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6955.py +++ /dev/null @@ -1,108 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Diffie-Hellman Proof-of-Possession Algorithms -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6955.txt -# - -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc3279 -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - - -# Imports from RFC 5652 - -MessageDigest = rfc5652.MessageDigest - -IssuerAndSerialNumber = rfc5652.IssuerAndSerialNumber - - -# Imports from RFC 5280 - -id_pkix = rfc5280.id_pkix - - -# Imports from RFC 3279 - -Dss_Sig_Value = rfc3279.Dss_Sig_Value - -DomainParameters = rfc3279.DomainParameters - - -# Static DH Proof-of-Possession - -class DhSigStatic(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('issuerAndSerial', IssuerAndSerialNumber()), - namedtype.NamedType('hashValue', MessageDigest()) - ) - - -# Object Identifiers - -id_dh_sig_hmac_sha1 = id_pkix + (6, 3, ) - -id_dhPop_static_sha1_hmac_sha1 = univ.ObjectIdentifier(id_dh_sig_hmac_sha1) - - -id_alg_dh_pop = id_pkix + (6, 4, ) - -id_alg_dhPop_sha1 = univ.ObjectIdentifier(id_alg_dh_pop) - -id_alg_dhPop_sha224 = id_pkix + (6, 5, ) - -id_alg_dhPop_sha256 = id_pkix + (6, 6, ) - -id_alg_dhPop_sha384 = id_pkix + (6, 7, ) - -id_alg_dhPop_sha512 = id_pkix + (6, 8, ) - - -id_alg_dhPop_static_sha224_hmac_sha224 = id_pkix + (6, 15, ) - -id_alg_dhPop_static_sha256_hmac_sha256 = id_pkix + (6, 16, ) - -id_alg_dhPop_static_sha384_hmac_sha384 = id_pkix + (6, 17, ) - -id_alg_dhPop_static_sha512_hmac_sha512 = id_pkix + (6, 18, ) - - -id_alg_ecdhPop_static_sha224_hmac_sha224 = id_pkix + (6, 25, ) - -id_alg_ecdhPop_static_sha256_hmac_sha256 = id_pkix + (6, 26, ) - -id_alg_ecdhPop_static_sha384_hmac_sha384 = id_pkix + (6, 27, ) - -id_alg_ecdhPop_static_sha512_hmac_sha512 = id_pkix + (6, 28, ) - - -# Update the Algorithm Identifier map in rfc5280.py - -_algorithmIdentifierMapUpdate = { - id_alg_dh_pop: DomainParameters(), - id_alg_dhPop_sha224: DomainParameters(), - id_alg_dhPop_sha256: DomainParameters(), - id_alg_dhPop_sha384: DomainParameters(), - id_alg_dhPop_sha512: DomainParameters(), - id_dh_sig_hmac_sha1: univ.Null(""), - id_alg_dhPop_static_sha224_hmac_sha224: univ.Null(""), - id_alg_dhPop_static_sha256_hmac_sha256: univ.Null(""), - id_alg_dhPop_static_sha384_hmac_sha384: univ.Null(""), - id_alg_dhPop_static_sha512_hmac_sha512: univ.Null(""), - id_alg_ecdhPop_static_sha224_hmac_sha224: univ.Null(""), - id_alg_ecdhPop_static_sha256_hmac_sha256: univ.Null(""), - id_alg_ecdhPop_static_sha384_hmac_sha384: univ.Null(""), - id_alg_ecdhPop_static_sha512_hmac_sha512: univ.Null(""), -} - 
-rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6960.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc6960.py deleted file mode 100644 index e5f130564901..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc6960.py +++ /dev/null @@ -1,223 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Online Certificate Status Protocol (OCSP) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc6960.txt -# - -from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful - -from pyasn1_modules import rfc2560 -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# Imports from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier -AuthorityInfoAccessSyntax = rfc5280.AuthorityInfoAccessSyntax -Certificate = rfc5280.Certificate -CertificateSerialNumber = rfc5280.CertificateSerialNumber -CRLReason = rfc5280.CRLReason -Extensions = rfc5280.Extensions -GeneralName = rfc5280.GeneralName -Name = rfc5280.Name - -id_kp = rfc5280.id_kp - -id_ad_ocsp = rfc5280.id_ad_ocsp - - -# Imports from the original OCSP module in RFC 2560 - -AcceptableResponses = rfc2560.AcceptableResponses -ArchiveCutoff = rfc2560.ArchiveCutoff -CertStatus = rfc2560.CertStatus -KeyHash = rfc2560.KeyHash -OCSPResponse = rfc2560.OCSPResponse -OCSPResponseStatus = rfc2560.OCSPResponseStatus -ResponseBytes = rfc2560.ResponseBytes -RevokedInfo = rfc2560.RevokedInfo -UnknownInfo = rfc2560.UnknownInfo -Version = rfc2560.Version - -id_kp_OCSPSigning = rfc2560.id_kp_OCSPSigning - -id_pkix_ocsp = rfc2560.id_pkix_ocsp -id_pkix_ocsp_archive_cutoff = rfc2560.id_pkix_ocsp_archive_cutoff -id_pkix_ocsp_basic = rfc2560.id_pkix_ocsp_basic -id_pkix_ocsp_crl = rfc2560.id_pkix_ocsp_crl -id_pkix_ocsp_nocheck = rfc2560.id_pkix_ocsp_nocheck -id_pkix_ocsp_nonce = rfc2560.id_pkix_ocsp_nonce -id_pkix_ocsp_response = rfc2560.id_pkix_ocsp_response -id_pkix_ocsp_service_locator = rfc2560.id_pkix_ocsp_service_locator - - -# Additional object identifiers - -id_pkix_ocsp_pref_sig_algs = id_pkix_ocsp + (8, ) -id_pkix_ocsp_extended_revoke = id_pkix_ocsp + (9, ) - - -# Updated structures (mostly to improve openTypes support) - -class CertID(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('issuerNameHash', univ.OctetString()), - namedtype.NamedType('issuerKeyHash', univ.OctetString()), - namedtype.NamedType('serialNumber', CertificateSerialNumber()) - ) - - -class SingleResponse(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('certID', CertID()), - namedtype.NamedType('certStatus', CertStatus()), - namedtype.NamedType('thisUpdate', useful.GeneralizedTime()), - namedtype.OptionalNamedType('nextUpdate', useful.GeneralizedTime().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('singleExtensions', Extensions().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) - ) - - -class ResponderID(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('byName', Name().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('byKey', KeyHash().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) - ) - - 
-class ResponseData(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', Version('v1').subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('responderID', ResponderID()), - namedtype.NamedType('producedAt', useful.GeneralizedTime()), - namedtype.NamedType('responses', univ.SequenceOf( - componentType=SingleResponse())), - namedtype.OptionalNamedType('responseExtensions', Extensions().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) - ) - - -class BasicOCSPResponse(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsResponseData', ResponseData()), - namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()), - namedtype.OptionalNamedType('certs', univ.SequenceOf( - componentType=Certificate()).subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))) - ) - - -class Request(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('reqCert', CertID()), - namedtype.OptionalNamedType('singleRequestExtensions', Extensions().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) - ) - - -class Signature(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()), - namedtype.OptionalNamedType('certs', univ.SequenceOf( - componentType=Certificate()).subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))) - ) - - -class TBSRequest(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', Version('v1').subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('requestorName', GeneralName().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('requestList', univ.SequenceOf( - componentType=Request())), - namedtype.OptionalNamedType('requestExtensions', Extensions().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) - ) - - -class OCSPRequest(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsRequest', TBSRequest()), - namedtype.OptionalNamedType('optionalSignature', Signature().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) - ) - - -# Previously omitted structure - -class ServiceLocator(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('issuer', Name()), - namedtype.NamedType('locator', AuthorityInfoAccessSyntax()) - ) - - -# Additional structures - -class CrlID(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('crlUrl', char.IA5String().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('crlNum', univ.Integer().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('crlTime', useful.GeneralizedTime().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) - ) - - -class PreferredSignatureAlgorithm(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('sigIdentifier', AlgorithmIdentifier()), - namedtype.OptionalNamedType('certIdentifier', AlgorithmIdentifier()) - ) - - -class PreferredSignatureAlgorithms(univ.SequenceOf): - componentType = 
PreferredSignatureAlgorithm() - - - -# Response Type OID to Response Map - -ocspResponseMap = { - id_pkix_ocsp_basic: BasicOCSPResponse(), -} - - -# Map of Extension OIDs to Extensions added to the ones -# that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - # Certificate Extension - id_pkix_ocsp_nocheck: univ.Null(""), - # OCSP Request Extensions - id_pkix_ocsp_nonce: univ.OctetString(), - id_pkix_ocsp_response: AcceptableResponses(), - id_pkix_ocsp_service_locator: ServiceLocator(), - id_pkix_ocsp_pref_sig_algs: PreferredSignatureAlgorithms(), - # OCSP Response Extensions - id_pkix_ocsp_crl: CrlID(), - id_pkix_ocsp_archive_cutoff: ArchiveCutoff(), - id_pkix_ocsp_extended_revoke: univ.Null(""), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7030.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7030.py deleted file mode 100644 index 84b6dc5f9a35..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7030.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Enrollment over Secure Transport (EST) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7030.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -MAX = float('inf') - - -# Imports from RFC 5652 - -Attribute = rfc5652.Attribute - - -# Asymmetric Decrypt Key Identifier Attribute - -id_aa_asymmDecryptKeyID = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.54') - -class AsymmetricDecryptKeyIdentifier(univ.OctetString): - pass - - -aa_asymmDecryptKeyID = Attribute() -aa_asymmDecryptKeyID['attrType'] = id_aa_asymmDecryptKeyID -aa_asymmDecryptKeyID['attrValues'][0] = AsymmetricDecryptKeyIdentifier() - - -# CSR Attributes - -class AttrOrOID(univ.Choice): - pass - -AttrOrOID.componentType = namedtype.NamedTypes( - namedtype.NamedType('oid', univ.ObjectIdentifier()), - namedtype.NamedType('attribute', Attribute()) -) - - -class CsrAttrs(univ.SequenceOf): - pass - -CsrAttrs.componentType = AttrOrOID() -CsrAttrs.subtypeSpec=constraint.ValueSizeConstraint(0, MAX) - - -# Update CMS Attribute Map - -_cmsAttributesMapUpdate = { - id_aa_asymmDecryptKeyID: AsymmetricDecryptKeyIdentifier(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7191.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7191.py deleted file mode 100644 index 7c2be1156278..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7191.py +++ /dev/null @@ -1,261 +0,0 @@ -# This file is being contributed to of pyasn1-modules software. -# -# Created by Russ Housley without assistance from the asn1ate tool. -# Modified by Russ Housley to add support for opentypes. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# CMS Key Package Receipt and Error Content Types -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7191.txt - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - -MAX = float('inf') - -DistinguishedName = rfc5280.DistinguishedName - - -# SingleAttribute is the same as Attribute in RFC 5652, except that the -# attrValues SET must have one and only one member - -class AttributeValue(univ.Any): - pass - - -class AttributeValues(univ.SetOf): - pass - -AttributeValues.componentType = AttributeValue() -AttributeValues.sizeSpec = univ.Set.sizeSpec + constraint.ValueSizeConstraint(1, 1) - - -class SingleAttribute(univ.Sequence): - pass - -SingleAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('attrType', univ.ObjectIdentifier()), - namedtype.NamedType('attrValues', AttributeValues(), - openType=opentype.OpenType('attrType', rfc5652.cmsAttributesMap) - ) -) - - -# SIR Entity Name - -class SIREntityNameType(univ.ObjectIdentifier): - pass - - -class SIREntityNameValue(univ.Any): - pass - - -class SIREntityName(univ.Sequence): - pass - -SIREntityName.componentType = namedtype.NamedTypes( - namedtype.NamedType('sirenType', SIREntityNameType()), - namedtype.NamedType('sirenValue', univ.OctetString()) - # CONTAINING the DER-encoded SIREntityNameValue -) - - -class SIREntityNames(univ.SequenceOf): - pass - -SIREntityNames.componentType = SIREntityName() -SIREntityNames.sizeSpec=constraint.ValueSizeConstraint(1, MAX) - - -id_dn = univ.ObjectIdentifier('2.16.840.1.101.2.1.16.0') - - -class siren_dn(SIREntityName): - def __init__(self): - SIREntityName.__init__(self) - self['sirenType'] = id_dn - - -# Key Package Error CMS Content Type - -class EnumeratedErrorCode(univ.Enumerated): - pass - -# Error codes with values <= 33 are aligned with RFC 5934 -EnumeratedErrorCode.namedValues = namedval.NamedValues( - ('decodeFailure', 1), - ('badContentInfo', 2), - ('badSignedData', 3), - ('badEncapContent', 4), - ('badCertificate', 5), - ('badSignerInfo', 6), - ('badSignedAttrs', 7), - ('badUnsignedAttrs', 8), - ('missingContent', 9), - ('noTrustAnchor', 10), - ('notAuthorized', 11), - ('badDigestAlgorithm', 12), - ('badSignatureAlgorithm', 13), - ('unsupportedKeySize', 14), - ('unsupportedParameters', 15), - ('signatureFailure', 16), - ('insufficientMemory', 17), - ('incorrectTarget', 23), - ('missingSignature', 29), - ('resourcesBusy', 30), - ('versionNumberMismatch', 31), - ('revokedCertificate', 33), - ('ambiguousDecrypt', 60), - ('noDecryptKey', 61), - ('badEncryptedData', 62), - ('badEnvelopedData', 63), - ('badAuthenticatedData', 64), - ('badAuthEnvelopedData', 65), - ('badKeyAgreeRecipientInfo', 66), - ('badKEKRecipientInfo', 67), - ('badEncryptContent', 68), - ('badEncryptAlgorithm', 69), - ('missingCiphertext', 70), - ('decryptFailure', 71), - ('badMACAlgorithm', 72), - ('badAuthAttrs', 73), - ('badUnauthAttrs', 74), - ('invalidMAC', 75), - ('mismatchedDigestAlg', 76), - ('missingCertificate', 77), - ('tooManySigners', 78), - ('missingSignedAttributes', 79), - ('derEncodingNotUsed', 80), - ('missingContentHints', 81), - ('invalidAttributeLocation', 82), - ('badMessageDigest', 83), - ('badKeyPackage', 84), - ('badAttributes', 85), - 
('attributeComparisonFailure', 86), - ('unsupportedSymmetricKeyPackage', 87), - ('unsupportedAsymmetricKeyPackage', 88), - ('constraintViolation', 89), - ('ambiguousDefaultValue', 90), - ('noMatchingRecipientInfo', 91), - ('unsupportedKeyWrapAlgorithm', 92), - ('badKeyTransRecipientInfo', 93), - ('other', 127) -) - - -class ErrorCodeChoice(univ.Choice): - pass - -ErrorCodeChoice.componentType = namedtype.NamedTypes( - namedtype.NamedType('enum', EnumeratedErrorCode()), - namedtype.NamedType('oid', univ.ObjectIdentifier()) -) - - -class KeyPkgID(univ.OctetString): - pass - - -class KeyPkgIdentifier(univ.Choice): - pass - -KeyPkgIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('pkgID', KeyPkgID()), - namedtype.NamedType('attribute', SingleAttribute()) -) - - -class KeyPkgVersion(univ.Integer): - pass - - -KeyPkgVersion.namedValues = namedval.NamedValues( - ('v1', 1), - ('v2', 2) -) - -KeyPkgVersion.subtypeSpec = constraint.ValueRangeConstraint(1, 65535) - - -id_ct_KP_keyPackageError = univ.ObjectIdentifier('2.16.840.1.101.2.1.2.78.6') - -class KeyPackageError(univ.Sequence): - pass - -KeyPackageError.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', KeyPkgVersion().subtype(value='v2')), - namedtype.OptionalNamedType('errorOf', KeyPkgIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), - namedtype.NamedType('errorBy', SIREntityName()), - namedtype.NamedType('errorCode', ErrorCodeChoice()) -) - - -# Key Package Receipt CMS Content Type - -id_ct_KP_keyPackageReceipt = univ.ObjectIdentifier('2.16.840.1.101.2.1.2.78.3') - -class KeyPackageReceipt(univ.Sequence): - pass - -KeyPackageReceipt.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', KeyPkgVersion().subtype(value='v2')), - namedtype.NamedType('receiptOf', KeyPkgIdentifier()), - namedtype.NamedType('receivedBy', SIREntityName()) -) - - -# Key Package Receipt Request Attribute - -class KeyPkgReceiptReq(univ.Sequence): - pass - -KeyPkgReceiptReq.componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('encryptReceipt', univ.Boolean().subtype(value=0)), - namedtype.OptionalNamedType('receiptsFrom', SIREntityNames().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('receiptsTo', SIREntityNames()) -) - - -id_aa_KP_keyPkgIdAndReceiptReq = univ.ObjectIdentifier('2.16.840.1.101.2.1.5.65') - -class KeyPkgIdentifierAndReceiptReq(univ.Sequence): - pass - -KeyPkgIdentifierAndReceiptReq.componentType = namedtype.NamedTypes( - namedtype.NamedType('pkgID', KeyPkgID()), - namedtype.OptionalNamedType('receiptReq', KeyPkgReceiptReq()) -) - - -# Map of Attribute Type OIDs to Attributes are added to -# the ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_KP_keyPkgIdAndReceiptReq: KeyPkgIdentifierAndReceiptReq(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - - -# Map of CMC Content Type OIDs to CMC Content Types are added to -# the ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_KP_keyPackageError: KeyPackageError(), - id_ct_KP_keyPackageReceipt: KeyPackageReceipt(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7229.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7229.py deleted file mode 100644 index e9bce2d5b61e..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7229.py +++ /dev/null @@ -1,29 +0,0 @@ -# 
-# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Object Identifiers for Test Certificate Policies -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7229.txt -# - -from pyasn1.type import univ - - -id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') - -id_TEST = id_pkix + (13, ) - -id_TEST_certPolicyOne = id_TEST + (1, ) -id_TEST_certPolicyTwo = id_TEST + (2, ) -id_TEST_certPolicyThree = id_TEST + (3, ) -id_TEST_certPolicyFour = id_TEST + (4, ) -id_TEST_certPolicyFive = id_TEST + (5, ) -id_TEST_certPolicySix = id_TEST + (6, ) -id_TEST_certPolicySeven = id_TEST + (7, ) -id_TEST_certPolicyEight = id_TEST + (8, ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7292.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7292.py deleted file mode 100644 index 1c9f319a5ddb..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7292.py +++ /dev/null @@ -1,357 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from the asn1ate tool. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# PKCS #12: Personal Information Exchange Syntax v1.1 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7292.txt -# https://www.rfc-editor.org/errata_search.php?rfc=7292 - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import opentype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc2315 -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5958 - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -# Initialize the maps used in PKCS#12 - -pkcs12BagTypeMap = { } - -pkcs12CertBagMap = { } - -pkcs12CRLBagMap = { } - -pkcs12SecretBagMap = { } - - -# Imports from RFC 2315, RFC 5652, and RFC 5958 - -DigestInfo = rfc2315.DigestInfo - - -ContentInfo = rfc5652.ContentInfo - -PKCS12Attribute = rfc5652.Attribute - - -EncryptedPrivateKeyInfo = rfc5958.EncryptedPrivateKeyInfo - -PrivateKeyInfo = rfc5958.PrivateKeyInfo - - -# CMSSingleAttribute is the same as Attribute in RFC 5652 except the attrValues -# SET must have one and only one member - -class AttributeType(univ.ObjectIdentifier): - pass - - -class AttributeValue(univ.Any): - pass - - -class AttributeValues(univ.SetOf): - pass - -AttributeValues.componentType = AttributeValue() - - -class CMSSingleAttribute(univ.Sequence): - pass - -CMSSingleAttribute.componentType = namedtype.NamedTypes( - namedtype.NamedType('attrType', AttributeType()), - namedtype.NamedType('attrValues', - AttributeValues().subtype(sizeSpec=constraint.ValueSizeConstraint(1, 1)), - openType=opentype.OpenType('attrType', rfc5652.cmsAttributesMap) - ) -) - - -# Object identifier arcs - -rsadsi = _OID(1, 2, 840, 113549) - -pkcs = _OID(rsadsi, 1) - -pkcs_9 = _OID(pkcs, 9) - -certTypes = _OID(pkcs_9, 22) - -crlTypes = _OID(pkcs_9, 23) - -pkcs_12 = _OID(pkcs, 12) - - -# PBE Algorithm Identifiers and Parameters Structure - -pkcs_12PbeIds = _OID(pkcs_12, 1) - -pbeWithSHAAnd128BitRC4 = _OID(pkcs_12PbeIds, 1) - -pbeWithSHAAnd40BitRC4 = _OID(pkcs_12PbeIds, 2) 
- -pbeWithSHAAnd3_KeyTripleDES_CBC = _OID(pkcs_12PbeIds, 3) - -pbeWithSHAAnd2_KeyTripleDES_CBC = _OID(pkcs_12PbeIds, 4) - -pbeWithSHAAnd128BitRC2_CBC = _OID(pkcs_12PbeIds, 5) - -pbeWithSHAAnd40BitRC2_CBC = _OID(pkcs_12PbeIds, 6) - - -class Pkcs_12PbeParams(univ.Sequence): - pass - -Pkcs_12PbeParams.componentType = namedtype.NamedTypes( - namedtype.NamedType('salt', univ.OctetString()), - namedtype.NamedType('iterations', univ.Integer()) -) - - -# Bag types - -bagtypes = _OID(pkcs_12, 10, 1) - -class BAG_TYPE(univ.Sequence): - pass - -BAG_TYPE.componentType = namedtype.NamedTypes( - namedtype.NamedType('id', univ.ObjectIdentifier()), - namedtype.NamedType('unnamed1', univ.Any(), - openType=opentype.OpenType('attrType', pkcs12BagTypeMap) - ) -) - - -id_keyBag = _OID(bagtypes, 1) - -class KeyBag(PrivateKeyInfo): - pass - - -id_pkcs8ShroudedKeyBag = _OID(bagtypes, 2) - -class PKCS8ShroudedKeyBag(EncryptedPrivateKeyInfo): - pass - - -id_certBag = _OID(bagtypes, 3) - -class CertBag(univ.Sequence): - pass - -CertBag.componentType = namedtype.NamedTypes( - namedtype.NamedType('certId', univ.ObjectIdentifier()), - namedtype.NamedType('certValue', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)), - openType=opentype.OpenType('certId', pkcs12CertBagMap) - ) -) - - -x509Certificate = CertBag() -x509Certificate['certId'] = _OID(certTypes, 1) -x509Certificate['certValue'] = univ.OctetString() -# DER-encoded X.509 certificate stored in OCTET STRING - - -sdsiCertificate = CertBag() -sdsiCertificate['certId'] = _OID(certTypes, 2) -sdsiCertificate['certValue'] = char.IA5String() -# Base64-encoded SDSI certificate stored in IA5String - - -id_CRLBag = _OID(bagtypes, 4) - -class CRLBag(univ.Sequence): - pass - -CRLBag.componentType = namedtype.NamedTypes( - namedtype.NamedType('crlId', univ.ObjectIdentifier()), - namedtype.NamedType('crlValue', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)), - openType=opentype.OpenType('crlId', pkcs12CRLBagMap) - ) -) - - -x509CRL = CRLBag() -x509CRL['crlId'] = _OID(crlTypes, 1) -x509CRL['crlValue'] = univ.OctetString() -# DER-encoded X.509 CRL stored in OCTET STRING - - -id_secretBag = _OID(bagtypes, 5) - -class SecretBag(univ.Sequence): - pass - -SecretBag.componentType = namedtype.NamedTypes( - namedtype.NamedType('secretTypeId', univ.ObjectIdentifier()), - namedtype.NamedType('secretValue', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)), - openType=opentype.OpenType('secretTypeId', pkcs12SecretBagMap) - ) -) - - -id_safeContentsBag = _OID(bagtypes, 6) - -class SafeBag(univ.Sequence): - pass - -SafeBag.componentType = namedtype.NamedTypes( - namedtype.NamedType('bagId', univ.ObjectIdentifier()), - namedtype.NamedType('bagValue', - univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)), - openType=opentype.OpenType('bagId', pkcs12BagTypeMap) - ), - namedtype.OptionalNamedType('bagAttributes', - univ.SetOf(componentType=PKCS12Attribute()) - ) -) - - -class SafeContents(univ.SequenceOf): - pass - -SafeContents.componentType = SafeBag() - - -# The PFX PDU - -class AuthenticatedSafe(univ.SequenceOf): - pass - -AuthenticatedSafe.componentType = ContentInfo() -# Data if unencrypted -# EncryptedData if password-encrypted -# EnvelopedData if public key-encrypted - - -class MacData(univ.Sequence): - pass - -MacData.componentType = namedtype.NamedTypes( - namedtype.NamedType('mac', DigestInfo()), - namedtype.NamedType('macSalt', 
univ.OctetString()), - namedtype.DefaultedNamedType('iterations', univ.Integer().subtype(value=1)) - # Note: The default is for historical reasons and its use is deprecated -) - - -class PFX(univ.Sequence): - pass - -PFX.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', - univ.Integer(namedValues=namedval.NamedValues(('v3', 3))) - ), - namedtype.NamedType('authSafe', ContentInfo()), - namedtype.OptionalNamedType('macData', MacData()) -) - - -# Local key identifier (also defined as certificateAttribute in rfc2985.py) - -pkcs_9_at_localKeyId = _OID(pkcs_9, 21) - -localKeyId = CMSSingleAttribute() -localKeyId['attrType'] = pkcs_9_at_localKeyId -localKeyId['attrValues'][0] = univ.OctetString() - - -# Friendly name (also defined as certificateAttribute in rfc2985.py) - -pkcs_9_ub_pkcs9String = univ.Integer(255) - -pkcs_9_ub_friendlyName = univ.Integer(pkcs_9_ub_pkcs9String) - -pkcs_9_at_friendlyName = _OID(pkcs_9, 20) - -class FriendlyName(char.BMPString): - pass - -FriendlyName.subtypeSpec = constraint.ValueSizeConstraint(1, pkcs_9_ub_friendlyName) - - -friendlyName = CMSSingleAttribute() -friendlyName['attrType'] = pkcs_9_at_friendlyName -friendlyName['attrValues'][0] = FriendlyName() - - -# Update the PKCS#12 maps - -_pkcs12BagTypeMap = { - id_keyBag: KeyBag(), - id_pkcs8ShroudedKeyBag: PKCS8ShroudedKeyBag(), - id_certBag: CertBag(), - id_CRLBag: CRLBag(), - id_secretBag: SecretBag(), - id_safeContentsBag: SafeBag(), -} - -pkcs12BagTypeMap.update(_pkcs12BagTypeMap) - - -_pkcs12CertBagMap = { - _OID(certTypes, 1): univ.OctetString(), - _OID(certTypes, 2): char.IA5String(), -} - -pkcs12CertBagMap.update(_pkcs12CertBagMap) - - -_pkcs12CRLBagMap = { - _OID(crlTypes, 1): univ.OctetString(), -} - -pkcs12CRLBagMap.update(_pkcs12CRLBagMap) - - -# Update the Algorithm Identifier map - -_algorithmIdentifierMapUpdate = { - pbeWithSHAAnd128BitRC4: Pkcs_12PbeParams(), - pbeWithSHAAnd40BitRC4: Pkcs_12PbeParams(), - pbeWithSHAAnd3_KeyTripleDES_CBC: Pkcs_12PbeParams(), - pbeWithSHAAnd2_KeyTripleDES_CBC: Pkcs_12PbeParams(), - pbeWithSHAAnd128BitRC2_CBC: Pkcs_12PbeParams(), - pbeWithSHAAnd40BitRC2_CBC: Pkcs_12PbeParams(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) - - -# Update the CMS Attribute map - -_cmsAttributesMapUpdate = { - pkcs_9_at_friendlyName: FriendlyName(), - pkcs_9_at_localKeyId: univ.OctetString(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7296.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7296.py deleted file mode 100644 index 95a191a14ded..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7296.py +++ /dev/null @@ -1,32 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# IKEv2 Certificate Bundle -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7296.txt - -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -class CertificateOrCRL(univ.Choice): - pass - -CertificateOrCRL.componentType = namedtype.NamedTypes( - namedtype.NamedType('cert', rfc5280.Certificate().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('crl', rfc5280.CertificateList().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class CertificateBundle(univ.SequenceOf): - pass - -CertificateBundle.componentType = CertificateOrCRL() diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7508.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7508.py deleted file mode 100644 index 66460240f149..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7508.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Securing Header Fields with S/MIME -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7508.txt -# https://www.rfc-editor.org/errata/eid5875 -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - -import string - -MAX = float('inf') - - -class Algorithm(univ.Enumerated): - namedValues = namedval.NamedValues( - ('canonAlgorithmSimple', 0), - ('canonAlgorithmRelaxed', 1) - ) - - -class HeaderFieldStatus(univ.Integer): - namedValues = namedval.NamedValues( - ('duplicated', 0), - ('deleted', 1), - ('modified', 2) - ) - - -class HeaderFieldName(char.VisibleString): - subtypeSpec = ( - constraint.PermittedAlphabetConstraint(*string.printable) - - constraint.PermittedAlphabetConstraint(':') - ) - - -class HeaderFieldValue(char.UTF8String): - pass - - -class HeaderField(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('field-Name', HeaderFieldName()), - namedtype.NamedType('field-Value', HeaderFieldValue()), - namedtype.DefaultedNamedType('field-Status', - HeaderFieldStatus().subtype(value='duplicated')) - ) - - -class HeaderFields(univ.SequenceOf): - componentType = HeaderField() - subtypeSpec = constraint.ValueSizeConstraint(1, MAX) - - -class SecureHeaderFields(univ.Set): - componentType = namedtype.NamedTypes( - namedtype.NamedType('canonAlgorithm', Algorithm()), - namedtype.NamedType('secHeaderFields', HeaderFields()) - ) - - -id_aa = univ.ObjectIdentifier((1, 2, 840, 113549, 1, 9, 16, 2, )) - -id_aa_secureHeaderFieldsIdentifier = id_aa + (55, ) - - - -# Map of Attribute Type OIDs to Attributes added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_secureHeaderFieldsIdentifier: SecureHeaderFields(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) - diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7585.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7585.py deleted file mode 100644 index b3fd4a5bacab..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7585.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# This file is part of 
pyasn1-modules software. -# -# Created by Russ Housley with some assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Network Access Identifier (NAI) Realm Name for Certificates -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7585.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -# NAI Realm Name for Certificates - -id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') - -id_on = id_pkix + (8, ) - -id_on_naiRealm = id_on + (8, ) - - -ub_naiRealm_length = univ.Integer(255) - - -class NAIRealm(char.UTF8String): - subtypeSpec = constraint.ValueSizeConstraint(1, ub_naiRealm_length) - - -naiRealm = rfc5280.AnotherName() -naiRealm['type-id'] = id_on_naiRealm -naiRealm['value'] = NAIRealm() - - -# Map of Other Name OIDs to Other Name is added to the -# ones that are in rfc5280.py - -_anotherNameMapUpdate = { - id_on_naiRealm: NAIRealm(), -} - -rfc5280.anotherNameMap.update(_anotherNameMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7633.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7633.py deleted file mode 100644 index f518440ff474..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7633.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with some assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Transport Layer Security (TLS) Feature Certificate Extension -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7633.txt -# - -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -# TLS Features Extension - -id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1') - -id_pe_tlsfeature = id_pe + (24, ) - - -class Features(univ.SequenceOf): - componentType = univ.Integer() - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_tlsfeature: Features(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7773.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7773.py deleted file mode 100644 index 0fee2aa346c1..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7773.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with some assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Authentication Context Certificate Extension -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7773.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# Authentication Context Extension - -e_legnamnden = univ.ObjectIdentifier('1.2.752.201') - -id_eleg_ce = e_legnamnden + (5, ) - -id_ce_authContext = id_eleg_ce + (1, ) - - -class AuthenticationContext(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('contextType', char.UTF8String()), - namedtype.OptionalNamedType('contextInfo', char.UTF8String()) - ) - -class AuthenticationContexts(univ.SequenceOf): - componentType = AuthenticationContext() - subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_ce_authContext: AuthenticationContexts(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7894-1.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7894-1.py deleted file mode 100644 index 92638d1bc070..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7894-1.py +++ /dev/null @@ -1,92 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Alternative Challenge Password Attributes for EST -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7894.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc6402 -from pyasn1_modules import rfc7191 - - -# SingleAttribute is the same as Attribute in RFC 5652, except that the -# attrValues SET must have one and only one member - -Attribute = rfc7191.SingleAttribute - - -# DirectoryString is the same as RFC 5280, except the length is limited to 255 - -class DirectoryString(univ.Choice): - pass - -DirectoryString.componentType = namedtype.NamedTypes( - namedtype.NamedType('teletexString', char.TeletexString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('printableString', char.PrintableString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('universalString', char.UniversalString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('utf8String', char.UTF8String().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('bmpString', char.BMPString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))) -) - - -# OTP Challenge Attribute - -id_aa_otpChallenge = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.56') - -ub_aa_otpChallenge = univ.Integer(255) - -otpChallenge = rfc5652.Attribute() -otpChallenge['attrType'] = id_aa_otpChallenge -otpChallenge['attrValues'][0] = DirectoryString() - - -# Revocation Challenge Attribute - -id_aa_revocationChallenge = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.57') - -ub_aa_revocationChallenge = univ.Integer(255) - -revocationChallenge = rfc5652.Attribute() 
-revocationChallenge['attrType'] = id_aa_revocationChallenge -revocationChallenge['attrValues'][0] = DirectoryString() - - -# EST Identity Linking Attribute - -id_aa_estIdentityLinking = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.58') - -ub_aa_est_identity_linking = univ.Integer(255) - -estIdentityLinking = rfc5652.Attribute() -estIdentityLinking['attrType'] = id_aa_estIdentityLinking -estIdentityLinking['attrValues'][0] = DirectoryString() - - -# Map of Attribute Type OIDs to Attributes added to the -# ones that are in rfc6402.py - -_cmcControlAttributesMapUpdate = { - id_aa_otpChallenge: DirectoryString(), - id_aa_revocationChallenge: DirectoryString(), - id_aa_estIdentityLinking: DirectoryString(), -} - -rfc6402.cmcControlAttributesMap.update(_cmcControlAttributesMapUpdate) \ No newline at end of file diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7894.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7894.py deleted file mode 100644 index 41936433d14b..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7894.py +++ /dev/null @@ -1,92 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Alternative Challenge Password Attributes for EST -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7894.txt -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc6402 -from pyasn1_modules import rfc7191 - - -# SingleAttribute is the same as Attribute in RFC 5652, except that the -# attrValues SET must have one and only one member - -Attribute = rfc7191.SingleAttribute - - -# DirectoryString is the same as RFC 5280, except the length is limited to 255 - -class DirectoryString(univ.Choice): - pass - -DirectoryString.componentType = namedtype.NamedTypes( - namedtype.NamedType('teletexString', char.TeletexString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('printableString', char.PrintableString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('universalString', char.UniversalString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('utf8String', char.UTF8String().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))), - namedtype.NamedType('bmpString', char.BMPString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(1, 255))) -) - - -# OTP Challenge Attribute - -id_aa_otpChallenge = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.56') - -ub_aa_otpChallenge = univ.Integer(255) - -otpChallenge = Attribute() -otpChallenge['attrType'] = id_aa_otpChallenge -otpChallenge['attrValues'][0] = DirectoryString() - - -# Revocation Challenge Attribute - -id_aa_revocationChallenge = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.57') - -ub_aa_revocationChallenge = univ.Integer(255) - -revocationChallenge = Attribute() -revocationChallenge['attrType'] = id_aa_revocationChallenge -revocationChallenge['attrValues'][0] = DirectoryString() - - -# EST Identity Linking Attribute - -id_aa_estIdentityLinking = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.58') - -ub_aa_est_identity_linking = univ.Integer(255) - -estIdentityLinking = Attribute() -estIdentityLinking['attrType'] = id_aa_estIdentityLinking -estIdentityLinking['attrValues'][0] = 
DirectoryString() - - -# Map of Attribute Type OIDs to Attributes added to the -# ones that are in rfc6402.py - -_cmcControlAttributesMapUpdate = { - id_aa_otpChallenge: DirectoryString(), - id_aa_revocationChallenge: DirectoryString(), - id_aa_estIdentityLinking: DirectoryString(), -} - -rfc6402.cmcControlAttributesMap.update(_cmcControlAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7906.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7906.py deleted file mode 100644 index fa5f6b0733c6..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7906.py +++ /dev/null @@ -1,736 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# NSA's CMS Key Management Attributes -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc7906.txt -# https://www.rfc-editor.org/errata/eid5850 -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc2634 -from pyasn1_modules import rfc4108 -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 -from pyasn1_modules import rfc6010 -from pyasn1_modules import rfc6019 -from pyasn1_modules import rfc7191 - -MAX = float('inf') - - -# Imports From RFC 2634 - -id_aa_contentHint = rfc2634.id_aa_contentHint - -ContentHints = rfc2634.ContentHints - -id_aa_securityLabel = rfc2634.id_aa_securityLabel - -SecurityPolicyIdentifier = rfc2634.SecurityPolicyIdentifier - -SecurityClassification = rfc2634.SecurityClassification - -ESSPrivacyMark = rfc2634.ESSPrivacyMark - -SecurityCategories= rfc2634.SecurityCategories - -ESSSecurityLabel = rfc2634.ESSSecurityLabel - - -# Imports From RFC 4108 - -id_aa_communityIdentifiers = rfc4108.id_aa_communityIdentifiers - -CommunityIdentifier = rfc4108.CommunityIdentifier - -CommunityIdentifiers = rfc4108.CommunityIdentifiers - - -# Imports From RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -Name = rfc5280.Name - -Certificate = rfc5280.Certificate - -GeneralNames = rfc5280.GeneralNames - -GeneralName = rfc5280.GeneralName - - -SubjectInfoAccessSyntax = rfc5280.SubjectInfoAccessSyntax - -id_pkix = rfc5280.id_pkix - -id_pe = rfc5280.id_pe - -id_pe_subjectInfoAccess = rfc5280.id_pe_subjectInfoAccess - - -# Imports From RFC 6010 - -CMSContentConstraints = rfc6010.CMSContentConstraints - - -# Imports From RFC 6019 - -BinaryTime = rfc6019.BinaryTime - -id_aa_binarySigningTime = rfc6019.id_aa_binarySigningTime - -BinarySigningTime = rfc6019.BinarySigningTime - - -# Imports From RFC 5652 - -Attribute = rfc5652.Attribute - -CertificateSet = rfc5652.CertificateSet - -CertificateChoices = rfc5652.CertificateChoices - -id_contentType = rfc5652.id_contentType - -ContentType = rfc5652.ContentType - -id_messageDigest = rfc5652.id_messageDigest - -MessageDigest = rfc5652.MessageDigest - - -# Imports From RFC 7191 - -SIREntityName = rfc7191.SIREntityName - -id_aa_KP_keyPkgIdAndReceiptReq = rfc7191.id_aa_KP_keyPkgIdAndReceiptReq - -KeyPkgIdentifierAndReceiptReq = rfc7191.KeyPkgIdentifierAndReceiptReq - - -# Key Province Attribute - -id_aa_KP_keyProvinceV2 = univ.ObjectIdentifier('2.16.840.1.101.2.1.5.71') - - -class KeyProvinceV2(univ.ObjectIdentifier): - pass - - -aa_keyProvince_v2 = Attribute() -aa_keyProvince_v2['attrType'] = 
id_aa_KP_keyProvinceV2 -aa_keyProvince_v2['attrValues'][0] = KeyProvinceV2() - - -# Manifest Attribute - -id_aa_KP_manifest = univ.ObjectIdentifier('2.16.840.1.101.2.1.5.72') - - -class ShortTitle(char.PrintableString): - pass - - -class Manifest(univ.SequenceOf): - pass - -Manifest.componentType = ShortTitle() -Manifest.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -aa_manifest = Attribute() -aa_manifest['attrType'] = id_aa_KP_manifest -aa_manifest['attrValues'][0] = Manifest() - - -# Key Algorithm Attribute - -id_kma_keyAlgorithm = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.1') - - -class KeyAlgorithm(univ.Sequence): - pass - -KeyAlgorithm.componentType = namedtype.NamedTypes( - namedtype.NamedType('keyAlg', univ.ObjectIdentifier()), - namedtype.OptionalNamedType('checkWordAlg', univ.ObjectIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.OptionalNamedType('crcAlg', univ.ObjectIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) -) - - -aa_keyAlgorithm = Attribute() -aa_keyAlgorithm['attrType'] = id_kma_keyAlgorithm -aa_keyAlgorithm['attrValues'][0] = KeyAlgorithm() - - -# User Certificate Attribute - -id_at_userCertificate = univ.ObjectIdentifier('2.5.4.36') - - -aa_userCertificate = Attribute() -aa_userCertificate['attrType'] = id_at_userCertificate -aa_userCertificate['attrValues'][0] = Certificate() - - -# Key Package Receivers Attribute - -id_kma_keyPkgReceiversV2 = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.16') - - -class KeyPkgReceiver(univ.Choice): - pass - -KeyPkgReceiver.componentType = namedtype.NamedTypes( - namedtype.NamedType('sirEntity', SIREntityName().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('community', CommunityIdentifier().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class KeyPkgReceiversV2(univ.SequenceOf): - pass - -KeyPkgReceiversV2.componentType = KeyPkgReceiver() -KeyPkgReceiversV2.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -aa_keyPackageReceivers_v2 = Attribute() -aa_keyPackageReceivers_v2['attrType'] = id_kma_keyPkgReceiversV2 -aa_keyPackageReceivers_v2['attrValues'][0] = KeyPkgReceiversV2() - - -# TSEC Nomenclature Attribute - -id_kma_TSECNomenclature = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.3') - - -class CharEdition(char.PrintableString): - pass - - -class CharEditionRange(univ.Sequence): - pass - -CharEditionRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('firstCharEdition', CharEdition()), - namedtype.NamedType('lastCharEdition', CharEdition()) -) - - -class NumEdition(univ.Integer): - pass - -NumEdition.subtypeSpec = constraint.ValueRangeConstraint(0, 308915776) - - -class NumEditionRange(univ.Sequence): - pass - -NumEditionRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('firstNumEdition', NumEdition()), - namedtype.NamedType('lastNumEdition', NumEdition()) -) - - -class EditionID(univ.Choice): - pass - -EditionID.componentType = namedtype.NamedTypes( - namedtype.NamedType('char', univ.Choice(componentType=namedtype.NamedTypes( - namedtype.NamedType('charEdition', CharEdition().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('charEditionRange', CharEditionRange().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) - )) - ), - namedtype.NamedType('num', univ.Choice(componentType=namedtype.NamedTypes( - 
namedtype.NamedType('numEdition', NumEdition().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), - namedtype.NamedType('numEditionRange', NumEditionRange().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))) - )) - ) -) - - -class Register(univ.Integer): - pass - -Register.subtypeSpec = constraint.ValueRangeConstraint(0, 2147483647) - - -class RegisterRange(univ.Sequence): - pass - -RegisterRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('firstRegister', Register()), - namedtype.NamedType('lastRegister', Register()) -) - - -class RegisterID(univ.Choice): - pass - -RegisterID.componentType = namedtype.NamedTypes( - namedtype.NamedType('register', Register().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), - namedtype.NamedType('registerRange', RegisterRange().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))) -) - - -class SegmentNumber(univ.Integer): - pass - -SegmentNumber.subtypeSpec = constraint.ValueRangeConstraint(1, 127) - - -class SegmentRange(univ.Sequence): - pass - -SegmentRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('firstSegment', SegmentNumber()), - namedtype.NamedType('lastSegment', SegmentNumber()) -) - - -class SegmentID(univ.Choice): - pass - -SegmentID.componentType = namedtype.NamedTypes( - namedtype.NamedType('segmentNumber', SegmentNumber().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), - namedtype.NamedType('segmentRange', SegmentRange().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))) -) - - -class TSECNomenclature(univ.Sequence): - pass - -TSECNomenclature.componentType = namedtype.NamedTypes( - namedtype.NamedType('shortTitle', ShortTitle()), - namedtype.OptionalNamedType('editionID', EditionID()), - namedtype.OptionalNamedType('registerID', RegisterID()), - namedtype.OptionalNamedType('segmentID', SegmentID()) -) - - -aa_tsecNomenclature = Attribute() -aa_tsecNomenclature['attrType'] = id_kma_TSECNomenclature -aa_tsecNomenclature['attrValues'][0] = TSECNomenclature() - - -# Key Purpose Attribute - -id_kma_keyPurpose = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.13') - - -class KeyPurpose(univ.Enumerated): - pass - -KeyPurpose.namedValues = namedval.NamedValues( - ('n-a', 0), - ('a', 65), - ('b', 66), - ('l', 76), - ('m', 77), - ('r', 82), - ('s', 83), - ('t', 84), - ('v', 86), - ('x', 88), - ('z', 90) -) - - -aa_keyPurpose = Attribute() -aa_keyPurpose['attrType'] = id_kma_keyPurpose -aa_keyPurpose['attrValues'][0] = KeyPurpose() - - -# Key Use Attribute - -id_kma_keyUse = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.14') - - -class KeyUse(univ.Enumerated): - pass - -KeyUse.namedValues = namedval.NamedValues( - ('n-a', 0), - ('ffk', 1), - ('kek', 2), - ('kpk', 3), - ('msk', 4), - ('qkek', 5), - ('tek', 6), - ('tsk', 7), - ('trkek', 8), - ('nfk', 9), - ('effk', 10), - ('ebfk', 11), - ('aek', 12), - ('wod', 13), - ('kesk', 246), - ('eik', 247), - ('ask', 248), - ('kmk', 249), - ('rsk', 250), - ('csk', 251), - ('sak', 252), - ('rgk', 253), - ('cek', 254), - ('exk', 255) -) - - -aa_keyUse = Attribute() -aa_keyPurpose['attrType'] = id_kma_keyUse -aa_keyPurpose['attrValues'][0] = KeyUse() - - -# Transport Key Attribute - -id_kma_transportKey = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.15') - - -class TransOp(univ.Enumerated): - pass - -TransOp.namedValues = namedval.NamedValues( - ('transport', 1), - ('operational', 2) -) - - -aa_transportKey = 
Attribute() -aa_transportKey['attrType'] = id_kma_transportKey -aa_transportKey['attrValues'][0] = TransOp() - - -# Key Distribution Period Attribute - -id_kma_keyDistPeriod = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.5') - - -class KeyDistPeriod(univ.Sequence): - pass - -KeyDistPeriod.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('doNotDistBefore', BinaryTime().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('doNotDistAfter', BinaryTime()) -) - - -aa_keyDistributionPeriod = Attribute() -aa_keyDistributionPeriod['attrType'] = id_kma_keyDistPeriod -aa_keyDistributionPeriod['attrValues'][0] = KeyDistPeriod() - - -# Key Validity Period Attribute - -id_kma_keyValidityPeriod = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.6') - - -class KeyValidityPeriod(univ.Sequence): - pass - -KeyValidityPeriod.componentType = namedtype.NamedTypes( - namedtype.NamedType('doNotUseBefore', BinaryTime()), - namedtype.OptionalNamedType('doNotUseAfter', BinaryTime()) -) - - -aa_keyValidityPeriod = Attribute() -aa_keyValidityPeriod['attrType'] = id_kma_keyValidityPeriod -aa_keyValidityPeriod['attrValues'][0] = KeyValidityPeriod() - - -# Key Duration Attribute - -id_kma_keyDuration = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.7') - - -ub_KeyDuration_months = univ.Integer(72) - -ub_KeyDuration_hours = univ.Integer(96) - -ub_KeyDuration_days = univ.Integer(732) - -ub_KeyDuration_weeks = univ.Integer(104) - -ub_KeyDuration_years = univ.Integer(100) - - -class KeyDuration(univ.Choice): - pass - -KeyDuration.componentType = namedtype.NamedTypes( - namedtype.NamedType('hours', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(1, ub_KeyDuration_hours)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('days', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(1, ub_KeyDuration_days))), - namedtype.NamedType('weeks', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(1, ub_KeyDuration_weeks)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('months', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(1, ub_KeyDuration_months)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), - namedtype.NamedType('years', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(1, ub_KeyDuration_years)).subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) -) - - -aa_keyDurationPeriod = Attribute() -aa_keyDurationPeriod['attrType'] = id_kma_keyDuration -aa_keyDurationPeriod['attrValues'][0] = KeyDuration() - - -# Classification Attribute - -id_aa_KP_classification = univ.ObjectIdentifier(id_aa_securityLabel) - - -id_enumeratedPermissiveAttributes = univ.ObjectIdentifier('2.16.840.1.101.2.1.8.3.1') - -id_enumeratedRestrictiveAttributes = univ.ObjectIdentifier('2.16.840.1.101.2.1.8.3.4') - -id_informativeAttributes = univ.ObjectIdentifier('2.16.840.1.101.2.1.8.3.3') - - -class SecurityAttribute(univ.Integer): - pass - -SecurityAttribute.subtypeSpec = constraint.ValueRangeConstraint(0, MAX) - - -class EnumeratedTag(univ.Sequence): - pass - -EnumeratedTag.componentType = namedtype.NamedTypes( - namedtype.NamedType('tagName', univ.ObjectIdentifier()), - namedtype.NamedType('attributeList', univ.SetOf(componentType=SecurityAttribute())) -) - - -class FreeFormField(univ.Choice): - pass - -FreeFormField.componentType = 
namedtype.NamedTypes( - namedtype.NamedType('bitSetAttributes', univ.BitString()), # Not permitted in RFC 7906 - namedtype.NamedType('securityAttributes', univ.SetOf(componentType=SecurityAttribute())) -) - - -class InformativeTag(univ.Sequence): - pass - -InformativeTag.componentType = namedtype.NamedTypes( - namedtype.NamedType('tagName', univ.ObjectIdentifier()), - namedtype.NamedType('attributes', FreeFormField()) -) - - -class Classification(ESSSecurityLabel): - pass - - -aa_classification = Attribute() -aa_classification['attrType'] = id_aa_KP_classification -aa_classification['attrValues'][0] = Classification() - - -# Split Identifier Attribute - -id_kma_splitID = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.11') - - -class SplitID(univ.Sequence): - pass - -SplitID.componentType = namedtype.NamedTypes( - namedtype.NamedType('half', univ.Enumerated( - namedValues=namedval.NamedValues(('a', 0), ('b', 1)))), - namedtype.OptionalNamedType('combineAlg', AlgorithmIdentifier()) -) - - -aa_splitIdentifier = Attribute() -aa_splitIdentifier['attrType'] = id_kma_splitID -aa_splitIdentifier['attrValues'][0] = SplitID() - - -# Key Package Type Attribute - -id_kma_keyPkgType = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.12') - - -class KeyPkgType(univ.ObjectIdentifier): - pass - - -aa_keyPackageType = Attribute() -aa_keyPackageType['attrType'] = id_kma_keyPkgType -aa_keyPackageType['attrValues'][0] = KeyPkgType() - - -# Signature Usage Attribute - -id_kma_sigUsageV3 = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.22') - - -class SignatureUsage(CMSContentConstraints): - pass - - -aa_signatureUsage_v3 = Attribute() -aa_signatureUsage_v3['attrType'] = id_kma_sigUsageV3 -aa_signatureUsage_v3['attrValues'][0] = SignatureUsage() - - -# Other Certificate Format Attribute - -id_kma_otherCertFormats = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.19') - - -aa_otherCertificateFormats = Attribute() -aa_signatureUsage_v3['attrType'] = id_kma_otherCertFormats -aa_signatureUsage_v3['attrValues'][0] = CertificateChoices() - - -# PKI Path Attribute - -id_at_pkiPath = univ.ObjectIdentifier('2.5.4.70') - - -class PkiPath(univ.SequenceOf): - pass - -PkiPath.componentType = Certificate() -PkiPath.subtypeSpec=constraint.ValueSizeConstraint(1, MAX) - - -aa_pkiPath = Attribute() -aa_pkiPath['attrType'] = id_at_pkiPath -aa_pkiPath['attrValues'][0] = PkiPath() - - -# Useful Certificates Attribute - -id_kma_usefulCerts = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.20') - - -aa_usefulCertificates = Attribute() -aa_usefulCertificates['attrType'] = id_kma_usefulCerts -aa_usefulCertificates['attrValues'][0] = CertificateSet() - - -# Key Wrap Attribute - -id_kma_keyWrapAlgorithm = univ.ObjectIdentifier('2.16.840.1.101.2.1.13.21') - - -aa_keyWrapAlgorithm = Attribute() -aa_keyWrapAlgorithm['attrType'] = id_kma_keyWrapAlgorithm -aa_keyWrapAlgorithm['attrValues'][0] = AlgorithmIdentifier() - - -# Content Decryption Key Identifier Attribute - -id_aa_KP_contentDecryptKeyID = univ.ObjectIdentifier('2.16.840.1.101.2.1.5.66') - - -class ContentDecryptKeyID(univ.OctetString): - pass - - -aa_contentDecryptKeyIdentifier = Attribute() -aa_contentDecryptKeyIdentifier['attrType'] = id_aa_KP_contentDecryptKeyID -aa_contentDecryptKeyIdentifier['attrValues'][0] = ContentDecryptKeyID() - - -# Certificate Pointers Attribute - -aa_certificatePointers = Attribute() -aa_certificatePointers['attrType'] = id_pe_subjectInfoAccess -aa_certificatePointers['attrValues'][0] = SubjectInfoAccessSyntax() - - -# CRL Pointers Attribute - -id_aa_KP_crlPointers 
= univ.ObjectIdentifier('2.16.840.1.101.2.1.5.70') - - -aa_cRLDistributionPoints = Attribute() -aa_cRLDistributionPoints['attrType'] = id_aa_KP_crlPointers -aa_cRLDistributionPoints['attrValues'][0] = GeneralNames() - - -# Extended Error Codes - -id_errorCodes = univ.ObjectIdentifier('2.16.840.1.101.2.1.22') - -id_missingKeyType = univ.ObjectIdentifier('2.16.840.1.101.2.1.22.1') - -id_privacyMarkTooLong = univ.ObjectIdentifier('2.16.840.1.101.2.1.22.2') - -id_unrecognizedSecurityPolicy = univ.ObjectIdentifier('2.16.840.1.101.2.1.22.3') - - -# Map of Attribute Type OIDs to Attributes added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_aa_contentHint: ContentHints(), - id_aa_communityIdentifiers: CommunityIdentifiers(), - id_aa_binarySigningTime: BinarySigningTime(), - id_contentType: ContentType(), - id_messageDigest: MessageDigest(), - id_aa_KP_keyPkgIdAndReceiptReq: KeyPkgIdentifierAndReceiptReq(), - id_aa_KP_keyProvinceV2: KeyProvinceV2(), - id_aa_KP_manifest: Manifest(), - id_kma_keyAlgorithm: KeyAlgorithm(), - id_at_userCertificate: Certificate(), - id_kma_keyPkgReceiversV2: KeyPkgReceiversV2(), - id_kma_TSECNomenclature: TSECNomenclature(), - id_kma_keyPurpose: KeyPurpose(), - id_kma_keyUse: KeyUse(), - id_kma_transportKey: TransOp(), - id_kma_keyDistPeriod: KeyDistPeriod(), - id_kma_keyValidityPeriod: KeyValidityPeriod(), - id_kma_keyDuration: KeyDuration(), - id_aa_KP_classification: Classification(), - id_kma_splitID: SplitID(), - id_kma_keyPkgType: KeyPkgType(), - id_kma_sigUsageV3: SignatureUsage(), - id_kma_otherCertFormats: CertificateChoices(), - id_at_pkiPath: PkiPath(), - id_kma_usefulCerts: CertificateSet(), - id_kma_keyWrapAlgorithm: AlgorithmIdentifier(), - id_aa_KP_contentDecryptKeyID: ContentDecryptKeyID(), - id_pe_subjectInfoAccess: SubjectInfoAccessSyntax(), - id_aa_KP_crlPointers: GeneralNames(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7914.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc7914.py deleted file mode 100644 index 99e955156722..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc7914.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
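For context: the pyasn1-modules files removed above all follow the same pattern. Each declares the ASN.1 types and OIDs for one RFC, then extends an opentype map such as rfc5652.cmsAttributesMap so a decoder can resolve attribute values by OID. Below is a minimal sketch (not taken from the vendored files) of how such a map is typically consumed; it assumes pyasn1 and pyasn1-modules are importable, and the helper name is invented.

# Minimal sketch, assuming pyasn1/pyasn1-modules are importable; the helper
# name and control flow are illustrative, not part of the vendored code.
from pyasn1.codec.der import decoder
from pyasn1_modules import rfc5652

def decode_attr_values(attribute):
    """Decode each value of a CMS Attribute via the registered OID map."""
    spec = rfc5652.cmsAttributesMap.get(attribute['attrType'])
    if spec is None:
        # Unknown attribute type: leave the values as opaque ANY payloads.
        return list(attribute['attrValues'])
    decoded = []
    for any_value in attribute['attrValues']:
        value, rest = decoder.decode(any_value, asn1Spec=spec)
        assert not rest
        decoded.append(value)
    return decoded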
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -#The scrypt Password-Based Key Derivation Function -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8520.txt -# https://www.rfc-editor.org/errata/eid5871 -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -id_scrypt = univ.ObjectIdentifier('1.3.6.1.4.1.11591.4.11') - - -class Scrypt_params(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('salt', - univ.OctetString()), - namedtype.NamedType('costParameter', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, MAX))), - namedtype.NamedType('blockSize', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, MAX))), - namedtype.NamedType('parallelizationParameter', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, MAX))), - namedtype.OptionalNamedType('keyLength', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, MAX))) - ) - - -# Update the Algorithm Identifier map in rfc5280.py - -_algorithmIdentifierMapUpdate = { - id_scrypt: Scrypt_params(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8017.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8017.py deleted file mode 100644 index fefed1dcd6b5..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8017.py +++ /dev/null @@ -1,153 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# PKCS #1: RSA Cryptography Specifications Version 2.2 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8017.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import univ - -from pyasn1_modules import rfc2437 -from pyasn1_modules import rfc3447 -from pyasn1_modules import rfc4055 -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# Import Algorithm Identifier from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - -class DigestAlgorithm(AlgorithmIdentifier): - pass - -class HashAlgorithm(AlgorithmIdentifier): - pass - -class MaskGenAlgorithm(AlgorithmIdentifier): - pass - -class PSourceAlgorithm(AlgorithmIdentifier): - pass - - -# Object identifiers from NIST SHA2 - -hashAlgs = univ.ObjectIdentifier('2.16.840.1.101.3.4.2') -id_sha256 = rfc4055.id_sha256 -id_sha384 = rfc4055.id_sha384 -id_sha512 = rfc4055.id_sha512 -id_sha224 = rfc4055.id_sha224 -id_sha512_224 = hashAlgs + (5, ) -id_sha512_256 = hashAlgs + (6, ) - - -# Basic object identifiers - -pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1') -rsaEncryption = rfc2437.rsaEncryption -id_RSAES_OAEP = rfc2437.id_RSAES_OAEP -id_pSpecified = rfc2437.id_pSpecified -id_RSASSA_PSS = rfc4055.id_RSASSA_PSS -md2WithRSAEncryption = rfc2437.md2WithRSAEncryption -md5WithRSAEncryption = rfc2437.md5WithRSAEncryption -sha1WithRSAEncryption = rfc2437.sha1WithRSAEncryption -sha224WithRSAEncryption = rfc4055.sha224WithRSAEncryption -sha256WithRSAEncryption = rfc4055.sha256WithRSAEncryption -sha384WithRSAEncryption = rfc4055.sha384WithRSAEncryption -sha512WithRSAEncryption = rfc4055.sha512WithRSAEncryption -sha512_224WithRSAEncryption = pkcs_1 + (15, ) 
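For context: rfc7914.py, removed above, registers Scrypt_params in rfc5280.algorithmIdentifierMap under id_scrypt. A minimal round-trip sketch of that mechanism follows (not taken from the vendored files); it assumes pyasn1 and pyasn1-modules are importable, and the scrypt parameter values are invented.

# Minimal sketch; the scrypt parameter values are invented for illustration.
from pyasn1.codec.der import decoder, encoder
from pyasn1.type import univ
from pyasn1_modules import rfc5280, rfc7914

params = rfc7914.Scrypt_params()
params['salt'] = univ.OctetString(b'example salt')
params['costParameter'] = 32768
params['blockSize'] = 8
params['parallelizationParameter'] = 1

alg_id = rfc5280.AlgorithmIdentifier()
alg_id['algorithm'] = rfc7914.id_scrypt
alg_id['parameters'] = encoder.encode(params)  # the ANY field carries DER

# Decode, then resolve the parameters through the map rfc7914.py updated.
decoded, _ = decoder.decode(encoder.encode(alg_id),
                            asn1Spec=rfc5280.AlgorithmIdentifier())
spec = rfc5280.algorithmIdentifierMap[decoded['algorithm']]
scrypt_params, _ = decoder.decode(decoded['parameters'], asn1Spec=spec)
assert scrypt_params['costParameter'] == 32768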
-sha512_256WithRSAEncryption = pkcs_1 + (16, ) -id_sha1 = rfc2437.id_sha1 -id_md2 = univ.ObjectIdentifier('1.2.840.113549.2.2') -id_md5 = univ.ObjectIdentifier('1.2.840.113549.2.5') -id_mgf1 = rfc2437.id_mgf1 - - -# Default parameter values - -sha1 = rfc4055.sha1Identifier -SHA1Parameters = univ.Null("") - -mgf1SHA1 = rfc4055.mgf1SHA1Identifier - -class EncodingParameters(univ.OctetString): - subtypeSpec = constraint.ValueSizeConstraint(0, MAX) - -pSpecifiedEmpty = rfc4055.pSpecifiedEmptyIdentifier - -emptyString = EncodingParameters(value='') - - -# Main structures - -class Version(univ.Integer): - namedValues = namedval.NamedValues( - ('two-prime', 0), - ('multi', 1) - ) - -class TrailerField(univ.Integer): - namedValues = namedval.NamedValues( - ('trailerFieldBC', 1) - ) - -RSAPublicKey = rfc2437.RSAPublicKey - -OtherPrimeInfo = rfc3447.OtherPrimeInfo -OtherPrimeInfos = rfc3447.OtherPrimeInfos -RSAPrivateKey = rfc3447.RSAPrivateKey - -RSAES_OAEP_params = rfc4055.RSAES_OAEP_params -rSAES_OAEP_Default_Identifier = rfc4055.rSAES_OAEP_Default_Identifier - -RSASSA_PSS_params = rfc4055.RSASSA_PSS_params -rSASSA_PSS_Default_Identifier = rfc4055.rSASSA_PSS_Default_Identifier - - -# Syntax for the EMSA-PKCS1-v1_5 hash identifier - -class DigestInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('digestAlgorithm', DigestAlgorithm()), - namedtype.NamedType('digest', univ.OctetString()) - ) - - -# Update the Algorithm Identifier map - -_algorithmIdentifierMapUpdate = { - id_sha1: univ.Null(), - id_sha224: univ.Null(), - id_sha256: univ.Null(), - id_sha384: univ.Null(), - id_sha512: univ.Null(), - id_sha512_224: univ.Null(), - id_sha512_256: univ.Null(), - id_mgf1: AlgorithmIdentifier(), - id_pSpecified: univ.OctetString(), - id_RSAES_OAEP: RSAES_OAEP_params(), - id_RSASSA_PSS: RSASSA_PSS_params(), - md2WithRSAEncryption: univ.Null(), - md5WithRSAEncryption: univ.Null(), - sha1WithRSAEncryption: univ.Null(), - sha224WithRSAEncryption: univ.Null(), - sha256WithRSAEncryption: univ.Null(), - sha384WithRSAEncryption: univ.Null(), - sha512WithRSAEncryption: univ.Null(), - sha512_224WithRSAEncryption: univ.Null(), - sha512_256WithRSAEncryption: univ.Null(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8018.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8018.py deleted file mode 100644 index 7a44eea8d25e..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8018.py +++ /dev/null @@ -1,260 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# PKCS #5: Password-Based Cryptography Specification, Version 2.1 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8018.txt -# - -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import univ - -from pyasn1_modules import rfc3565 -from pyasn1_modules import rfc5280 - -MAX = float('inf') - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -# Import from RFC 3565 - -AES_IV = rfc3565.AES_IV - - -# Import from RFC 5280 - -AlgorithmIdentifier = rfc5280.AlgorithmIdentifier - - -# Basic object identifiers - -nistAlgorithms = _OID(2, 16, 840, 1, 101, 3, 4) - -aes = _OID(nistAlgorithms, 1) - -oiw = _OID(1, 3, 14) - -rsadsi = _OID(1, 2, 840, 113549) - -pkcs = _OID(rsadsi, 1) - -digestAlgorithm = _OID(rsadsi, 2) - -encryptionAlgorithm = _OID(rsadsi, 3) - -pkcs_5 = _OID(pkcs, 5) - - - -# HMAC object identifiers - -id_hmacWithSHA1 = _OID(digestAlgorithm, 7) - -id_hmacWithSHA224 = _OID(digestAlgorithm, 8) - -id_hmacWithSHA256 = _OID(digestAlgorithm, 9) - -id_hmacWithSHA384 = _OID(digestAlgorithm, 10) - -id_hmacWithSHA512 = _OID(digestAlgorithm, 11) - -id_hmacWithSHA512_224 = _OID(digestAlgorithm, 12) - -id_hmacWithSHA512_256 = _OID(digestAlgorithm, 13) - - -# PBES1 object identifiers - -pbeWithMD2AndDES_CBC = _OID(pkcs_5, 1) - -pbeWithMD2AndRC2_CBC = _OID(pkcs_5, 4) - -pbeWithMD5AndDES_CBC = _OID(pkcs_5, 3) - -pbeWithMD5AndRC2_CBC = _OID(pkcs_5, 6) - -pbeWithSHA1AndDES_CBC = _OID(pkcs_5, 10) - -pbeWithSHA1AndRC2_CBC = _OID(pkcs_5, 11) - - -# Supporting techniques object identifiers - -desCBC = _OID(oiw, 3, 2, 7) - -des_EDE3_CBC = _OID(encryptionAlgorithm, 7) - -rc2CBC = _OID(encryptionAlgorithm, 2) - -rc5_CBC_PAD = _OID(encryptionAlgorithm, 9) - -aes128_CBC_PAD = _OID(aes, 2) - -aes192_CBC_PAD = _OID(aes, 22) - -aes256_CBC_PAD = _OID(aes, 42) - - -# PBES1 - -class PBEParameter(univ.Sequence): - pass - -PBEParameter.componentType = namedtype.NamedTypes( - namedtype.NamedType('salt', univ.OctetString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(8, 8))), - namedtype.NamedType('iterationCount', univ.Integer()) -) - - -# PBES2 - -id_PBES2 = _OID(pkcs_5, 13) - - -class PBES2_params(univ.Sequence): - pass - -PBES2_params.componentType = namedtype.NamedTypes( - namedtype.NamedType('keyDerivationFunc', AlgorithmIdentifier()), - namedtype.NamedType('encryptionScheme', AlgorithmIdentifier()) -) - - -# PBMAC1 - -id_PBMAC1 = _OID(pkcs_5, 14) - - -class PBMAC1_params(univ.Sequence): - pass - -PBMAC1_params.componentType = namedtype.NamedTypes( - namedtype.NamedType('keyDerivationFunc', AlgorithmIdentifier()), - namedtype.NamedType('messageAuthScheme', AlgorithmIdentifier()) -) - - -# PBKDF2 - -id_PBKDF2 = _OID(pkcs_5, 12) - - -algid_hmacWithSHA1 = AlgorithmIdentifier() -algid_hmacWithSHA1['algorithm'] = id_hmacWithSHA1 -algid_hmacWithSHA1['parameters'] = univ.Null("") - - -class PBKDF2_params(univ.Sequence): - pass - -PBKDF2_params.componentType = namedtype.NamedTypes( - namedtype.NamedType('salt', univ.Choice(componentType=namedtype.NamedTypes( - namedtype.NamedType('specified', univ.OctetString()), - namedtype.NamedType('otherSource', AlgorithmIdentifier()) - ))), - namedtype.NamedType('iterationCount', univ.Integer().subtype( - 
subtypeSpec=constraint.ValueRangeConstraint(1, MAX))), - namedtype.OptionalNamedType('keyLength', univ.Integer().subtype( - subtypeSpec=constraint.ValueRangeConstraint(1, MAX))), - namedtype.DefaultedNamedType('prf', algid_hmacWithSHA1) -) - - -# RC2 CBC algorithm parameter - -class RC2_CBC_Parameter(univ.Sequence): - pass - -RC2_CBC_Parameter.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('rc2ParameterVersion', univ.Integer()), - namedtype.NamedType('iv', univ.OctetString().subtype( - subtypeSpec=constraint.ValueSizeConstraint(8, 8))) -) - - -# RC5 CBC algorithm parameter - -class RC5_CBC_Parameters(univ.Sequence): - pass - -RC5_CBC_Parameters.componentType = namedtype.NamedTypes( - namedtype.NamedType('version', - univ.Integer(namedValues=namedval.NamedValues(('v1_0', 16))).subtype( - subtypeSpec=constraint.SingleValueConstraint(16))), - namedtype.NamedType('rounds', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(8, 127))), - namedtype.NamedType('blockSizeInBits', - univ.Integer().subtype(subtypeSpec=constraint.SingleValueConstraint(64, 128))), - namedtype.OptionalNamedType('iv', univ.OctetString()) -) - - -# Initialization Vector for AES: OCTET STRING (SIZE(16)) - -class AES_IV(univ.OctetString): - pass - -AES_IV.subtypeSpec = constraint.ValueSizeConstraint(16, 16) - - -# Initialization Vector for DES: OCTET STRING (SIZE(8)) - -class DES_IV(univ.OctetString): - pass - -DES_IV.subtypeSpec = constraint.ValueSizeConstraint(8, 8) - - -# Update the Algorithm Identifier map - -_algorithmIdentifierMapUpdate = { - # PBKDF2-PRFs - id_hmacWithSHA1: univ.Null(), - id_hmacWithSHA224: univ.Null(), - id_hmacWithSHA256: univ.Null(), - id_hmacWithSHA384: univ.Null(), - id_hmacWithSHA512: univ.Null(), - id_hmacWithSHA512_224: univ.Null(), - id_hmacWithSHA512_256: univ.Null(), - # PBES1Algorithms - pbeWithMD2AndDES_CBC: PBEParameter(), - pbeWithMD2AndRC2_CBC: PBEParameter(), - pbeWithMD5AndDES_CBC: PBEParameter(), - pbeWithMD5AndRC2_CBC: PBEParameter(), - pbeWithSHA1AndDES_CBC: PBEParameter(), - pbeWithSHA1AndRC2_CBC: PBEParameter(), - # PBES2Algorithms - id_PBES2: PBES2_params(), - # PBES2-KDFs - id_PBKDF2: PBKDF2_params(), - # PBMAC1Algorithms - id_PBMAC1: PBMAC1_params(), - # SupportingAlgorithms - desCBC: DES_IV(), - des_EDE3_CBC: DES_IV(), - rc2CBC: RC2_CBC_Parameter(), - rc5_CBC_PAD: RC5_CBC_Parameters(), - aes128_CBC_PAD: AES_IV(), - aes192_CBC_PAD: AES_IV(), - aes256_CBC_PAD: AES_IV(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8103.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8103.py deleted file mode 100644 index 6429e8635f6d..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8103.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from the asn1ate tool. -# Auto-generated by asn1ate v.0.6.0 from rfc8103.asn. 
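For context: several of the removed modules repeat the same _OID helper (rfc8018.py above, rfc8103.py and rfc8226.py below), while others build OIDs by tuple concatenation (rfc8017.py above, rfc8209.py below). A short sketch (not taken from the vendored files) of what both idioms produce, assuming pyasn1 is importable:

# Minimal sketch of the two OID-building idioms used by the deleted modules.
from pyasn1.type import univ

def _OID(*components):
    # Same shape as the helper in the deleted rfc8018.py: flatten a mix of
    # ObjectIdentifier and integer arguments into a single OID.
    output = []
    for x in components:
        if isinstance(x, univ.ObjectIdentifier):
            output.extend(list(x))
        else:
            output.append(int(x))
    return univ.ObjectIdentifier(output)

rsadsi = _OID(1, 2, 840, 113549)
pkcs_5 = _OID(rsadsi, 1, 5)
assert pkcs_5 == univ.ObjectIdentifier('1.2.840.113549.1.5')

# Tuple concatenation (as in rfc8209.py) appends arcs to an existing OID.
id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3')
assert (id_kp + (30, )).prettyPrint() == '1.3.6.1.5.5.7.3.30'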
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# ChaCha20Poly1305 algorithm fo use with the Authenticated-Enveloped-Data -# protecting content type for the Cryptographic Message Syntax (CMS) -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8103.txt - -from pyasn1.type import constraint -from pyasn1.type import univ - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -class AEADChaCha20Poly1305Nonce(univ.OctetString): - pass - - -AEADChaCha20Poly1305Nonce.subtypeSpec = constraint.ValueSizeConstraint(12, 12) - -id_alg_AEADChaCha20Poly1305 = _OID(1, 2, 840, 113549, 1, 9, 16, 3, 18) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8209.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8209.py deleted file mode 100644 index 7d70f51b0c00..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8209.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# BGPsec Router PKI Profile -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8209.txt -# - -from pyasn1.type import univ - - -id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3') - -id_kp_bgpsec_router = id_kp + (30, ) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8226.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8226.py deleted file mode 100644 index e7fe9460e95d..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8226.py +++ /dev/null @@ -1,149 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from the asn1ate tool, with manual -# changes to implement appropriate constraints and added comments. -# Modified by Russ Housley to add maps for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# JWT Claim Constraints and TN Authorization List for certificate extensions. 
-# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8226.txt (with errata corrected) - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import namedtype -from pyasn1.type import tag -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -def _OID(*components): - output = [] - for x in tuple(components): - if isinstance(x, univ.ObjectIdentifier): - output.extend(list(x)) - else: - output.append(int(x)) - - return univ.ObjectIdentifier(output) - - -class JWTClaimName(char.IA5String): - pass - - -class JWTClaimNames(univ.SequenceOf): - pass - -JWTClaimNames.componentType = JWTClaimName() -JWTClaimNames.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - - -class JWTClaimPermittedValues(univ.Sequence): - pass - -JWTClaimPermittedValues.componentType = namedtype.NamedTypes( - namedtype.NamedType('claim', JWTClaimName()), - namedtype.NamedType('permitted', univ.SequenceOf( - componentType=char.UTF8String()).subtype( - sizeSpec=constraint.ValueSizeConstraint(1, MAX))) -) - - -class JWTClaimPermittedValuesList(univ.SequenceOf): - pass - -JWTClaimPermittedValuesList.componentType = JWTClaimPermittedValues() -JWTClaimPermittedValuesList.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - - -class JWTClaimConstraints(univ.Sequence): - pass - -JWTClaimConstraints.componentType = namedtype.NamedTypes( - namedtype.OptionalNamedType('mustInclude', - JWTClaimNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType('permittedValues', - JWTClaimPermittedValuesList().subtype(explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 1))) -) - -JWTClaimConstraints.subtypeSpec = constraint.ConstraintsUnion( - constraint.WithComponentsConstraint( - ('mustInclude', constraint.ComponentPresentConstraint())), - constraint.WithComponentsConstraint( - ('permittedValues', constraint.ComponentPresentConstraint())) -) - - -id_pe_JWTClaimConstraints = _OID(1, 3, 6, 1, 5, 5, 7, 1, 27) - - -class ServiceProviderCode(char.IA5String): - pass - - -class TelephoneNumber(char.IA5String): - pass - -TelephoneNumber.subtypeSpec = constraint.ConstraintsIntersection( - constraint.ValueSizeConstraint(1, 15), - constraint.PermittedAlphabetConstraint( - '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '#', '*') -) - - -class TelephoneNumberRange(univ.Sequence): - pass - -TelephoneNumberRange.componentType = namedtype.NamedTypes( - namedtype.NamedType('start', TelephoneNumber()), - namedtype.NamedType('count', - univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(2, MAX))) -) - - -class TNEntry(univ.Choice): - pass - -TNEntry.componentType = namedtype.NamedTypes( - namedtype.NamedType('spc', - ServiceProviderCode().subtype(explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 0))), - namedtype.NamedType('range', - TelephoneNumberRange().subtype(explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatConstructed, 1))), - namedtype.NamedType('one', - TelephoneNumber().subtype(explicitTag=tag.Tag(tag.tagClassContext, - tag.tagFormatSimple, 2))) -) - - -class TNAuthorizationList(univ.SequenceOf): - pass - -TNAuthorizationList.componentType = TNEntry() -TNAuthorizationList.sizeSpec = constraint.ValueSizeConstraint(1, MAX) - -id_pe_TNAuthList = _OID(1, 3, 6, 1, 5, 5, 7, 1, 26) - - -id_ad_stirTNList = _OID(1, 3, 6, 1, 5, 5, 7, 48, 14) - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - 
id_pe_TNAuthList: TNAuthorizationList(), - id_pe_JWTClaimConstraints: JWTClaimConstraints(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8358.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8358.py deleted file mode 100644 index 647a366622ad..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8358.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Digital Signatures on Internet-Draft Documents -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8358.txt -# - -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - - -id_ct = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1') - -id_ct_asciiTextWithCRLF = id_ct + (27, ) - -id_ct_epub = id_ct + (39, ) - -id_ct_htmlWithCRLF = id_ct + (38, ) - -id_ct_pdf = id_ct + (29, ) - -id_ct_postscript = id_ct + (30, ) - -id_ct_utf8TextWithCRLF = id_ct + (37, ) - -id_ct_xml = id_ct + (28, ) - - -# Map of Content Type OIDs to Content Types is added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_asciiTextWithCRLF: univ.OctetString(), - id_ct_epub: univ.OctetString(), - id_ct_htmlWithCRLF: univ.OctetString(), - id_ct_pdf: univ.OctetString(), - id_ct_postscript: univ.OctetString(), - id_ct_utf8TextWithCRLF: univ.OctetString(), - id_ct_xml: univ.OctetString(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8360.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8360.py deleted file mode 100644 index ca180c18d81b..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8360.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Resource Public Key Infrastructure (RPKI) Validation Reconsidered -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8360.txt -# https://www.rfc-editor.org/errata/eid5870 -# - -from pyasn1.type import univ - -from pyasn1_modules import rfc3779 -from pyasn1_modules import rfc5280 - - -# IP Address Delegation Extension V2 - -id_pe_ipAddrBlocks_v2 = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.28') - -IPAddrBlocks = rfc3779.IPAddrBlocks - - -# Autonomous System Identifier Delegation Extension V2 - -id_pe_autonomousSysIds_v2 = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.29') - -ASIdentifiers = rfc3779.ASIdentifiers - - -# Map of Certificate Extension OIDs to Extensions is added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_ipAddrBlocks_v2: IPAddrBlocks(), - id_pe_autonomousSysIds_v2: ASIdentifiers(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8398.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8398.py deleted file mode 100644 index 151b63210795..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8398.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with some assistance from asn1ate v.0.6.0. 
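For context: the TelephoneNumber type in the removed rfc8226.py above combines a ValueSizeConstraint with a PermittedAlphabetConstraint. The sketch below (not taken from the vendored files) shows how pyasn1 is expected to enforce such a subtypeSpec when a value object is created, assuming the constraint-checking behavior of the pyasn1 0.4.x releases contemporary with this tree.

# Minimal sketch; assumes pyasn1 verifies subtypeSpec at value creation.
from pyasn1 import error
from pyasn1.type import char, constraint

class TelephoneNumber(char.IA5String):
    # Same constraints as the class in the deleted rfc8226.py.
    subtypeSpec = constraint.ConstraintsIntersection(
        constraint.ValueSizeConstraint(1, 15),
        constraint.PermittedAlphabetConstraint(
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '#', '*'))

TelephoneNumber('8005551212')        # within size and alphabet: accepted
try:
    TelephoneNumber('+18005551212')  # '+' is outside the permitted alphabet
except error.PyAsn1Error:
    pass                             # the constraint violation surfaces here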
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Internationalized Email Addresses in X.509 Certificates -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8398.txt -# https://www.rfc-editor.org/errata/eid5418 -# - -from pyasn1.type import char -from pyasn1.type import constraint -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - -MAX = float('inf') - - -# SmtpUTF8Mailbox contains Mailbox as specified in Section 3.3 of RFC 6531 - -id_pkix = rfc5280.id_pkix - -id_on = id_pkix + (8, ) - -id_on_SmtpUTF8Mailbox = id_on + (9, ) - - -class SmtpUTF8Mailbox(char.UTF8String): - pass - -SmtpUTF8Mailbox.subtypeSpec = constraint.ValueSizeConstraint(1, MAX) - - -on_SmtpUTF8Mailbox = rfc5280.AnotherName() -on_SmtpUTF8Mailbox['type-id'] = id_on_SmtpUTF8Mailbox -on_SmtpUTF8Mailbox['value'] = SmtpUTF8Mailbox() - - -# Map of Other Name OIDs to Other Name is added to the -# ones that are in rfc5280.py - -_anotherNameMapUpdate = { - id_on_SmtpUTF8Mailbox: SmtpUTF8Mailbox(), -} - -rfc5280.anotherNameMap.update(_anotherNameMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8410.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8410.py deleted file mode 100644 index 98bc97bb14b2..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8410.py +++ /dev/null @@ -1,43 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Algorithm Identifiers for Ed25519, Ed448, X25519, and X448 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8410.txt - -from pyasn1.type import univ -from pyasn1_modules import rfc3565 -from pyasn1_modules import rfc4055 -from pyasn1_modules import rfc5280 - - -class SignatureAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -class CurvePrivateKey(univ.OctetString): - pass - - -id_X25519 = univ.ObjectIdentifier('1.3.101.110') - -id_X448 = univ.ObjectIdentifier('1.3.101.111') - -id_Ed25519 = univ.ObjectIdentifier('1.3.101.112') - -id_Ed448 = univ.ObjectIdentifier('1.3.101.113') - -id_sha512 = rfc4055.id_sha512 - -id_aes128_wrap = rfc3565.id_aes128_wrap - -id_aes256_wrap = rfc3565.id_aes256_wrap diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8418.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8418.py deleted file mode 100644 index 6e76487c88b1..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8418.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Elliptic Curve Diffie-Hellman (ECDH) Key Agreement Algorithm -# with X25519 and X448 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8418.txt - -from pyasn1.type import univ -from pyasn1_modules import rfc5280 - - -class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -class KeyWrapAlgorithmIdentifier(rfc5280.AlgorithmIdentifier): - pass - - -dhSinglePass_stdDH_sha256kdf_scheme = univ.ObjectIdentifier('1.3.133.16.840.63.0.11.1') - -dhSinglePass_stdDH_sha384kdf_scheme = univ.ObjectIdentifier('1.3.133.16.840.63.0.11.2') - -dhSinglePass_stdDH_sha512kdf_scheme = univ.ObjectIdentifier('1.3.133.16.840.63.0.11.3') - -dhSinglePass_stdDH_hkdf_sha256_scheme = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.19') - -dhSinglePass_stdDH_hkdf_sha384_scheme = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.20') - -dhSinglePass_stdDH_hkdf_sha512_scheme = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.21') diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8419.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8419.py deleted file mode 100644 index f10994be28e8..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8419.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Edwards-Curve Digital Signature Algorithm (EdDSA) Signatures in the CMS -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8419.txt -# https://www.rfc-editor.org/errata/eid5869 - - -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -class ShakeOutputLen(univ.Integer): - pass - - -id_Ed25519 = univ.ObjectIdentifier('1.3.101.112') - -sigAlg_Ed25519 = rfc5280.AlgorithmIdentifier() -sigAlg_Ed25519['algorithm'] = id_Ed25519 -# sigAlg_Ed25519['parameters'] is absent - - -id_Ed448 = univ.ObjectIdentifier('1.3.101.113') - -sigAlg_Ed448 = rfc5280.AlgorithmIdentifier() -sigAlg_Ed448['algorithm'] = id_Ed448 -# sigAlg_Ed448['parameters'] is absent - - -hashAlgs = univ.ObjectIdentifier('2.16.840.1.101.3.4.2') - -id_sha512 = hashAlgs + (3, ) - -hashAlg_SHA_512 = rfc5280.AlgorithmIdentifier() -hashAlg_SHA_512['algorithm'] = id_sha512 -# hashAlg_SHA_512['parameters'] is absent - - -id_shake256 = hashAlgs + (12, ) - -hashAlg_SHAKE256 = rfc5280.AlgorithmIdentifier() -hashAlg_SHAKE256['algorithm'] = id_shake256 -# hashAlg_SHAKE256['parameters']is absent - - -id_shake256_len = hashAlgs + (18, ) - -hashAlg_SHAKE256_LEN = rfc5280.AlgorithmIdentifier() -hashAlg_SHAKE256_LEN['algorithm'] = id_shake256_len -hashAlg_SHAKE256_LEN['parameters'] = ShakeOutputLen() - - -# Map of Algorithm Identifier OIDs to Parameters added to the -# ones in rfc5280.py. Do not add OIDs with absent paramaters. - -_algorithmIdentifierMapUpdate = { - id_shake256_len: ShakeOutputLen(), -} - -rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8479.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8479.py deleted file mode 100644 index 57f78b62f2c2..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8479.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. 
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Storing Validation Parameters in PKCS#8 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8479.txt -# - -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5652 - - -id_attr_validation_parameters = univ.ObjectIdentifier('1.3.6.1.4.1.2312.18.8.1') - - -class ValidationParams(univ.Sequence): - pass - -ValidationParams.componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlg', univ.ObjectIdentifier()), - namedtype.NamedType('seed', univ.OctetString()) -) - - -at_validation_parameters = rfc5652.Attribute() -at_validation_parameters['attrType'] = id_attr_validation_parameters -at_validation_parameters['attrValues'][0] = ValidationParams() - - -# Map of Attribute Type OIDs to Attributes added to the -# ones that are in rfc5652.py - -_cmsAttributesMapUpdate = { - id_attr_validation_parameters: ValidationParams(), -} - -rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8494.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8494.py deleted file mode 100644 index fe349e14ca12..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8494.py +++ /dev/null @@ -1,80 +0,0 @@ -# This file is being contributed to pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Multicast Email (MULE) over Allied Communications Publication 142 -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8494.txt - -from pyasn1.type import namedtype -from pyasn1.type import namedval -from pyasn1.type import tag -from pyasn1.type import univ - - -id_mmhs_CDT = univ.ObjectIdentifier('1.3.26.0.4406.0.4.2') - - -class AlgorithmID_ShortForm(univ.Integer): - pass - -AlgorithmID_ShortForm.namedValues = namedval.NamedValues( - ('zlibCompress', 0) -) - - -class ContentType_ShortForm(univ.Integer): - pass - -ContentType_ShortForm.namedValues = namedval.NamedValues( - ('unidentified', 0), - ('external', 1), - ('p1', 2), - ('p3', 3), - ('p7', 4), - ('mule', 25) -) - - -class CompressedContentInfo(univ.Sequence): - pass - -CompressedContentInfo.componentType = namedtype.NamedTypes( - namedtype.NamedType('unnamed', univ.Choice(componentType=namedtype.NamedTypes( - namedtype.NamedType('contentType-ShortForm', - ContentType_ShortForm().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('contentType-OID', - univ.ObjectIdentifier().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))) - ))), - namedtype.NamedType('compressedContent', - univ.OctetString().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))) -) - - -class CompressionAlgorithmIdentifier(univ.Choice): - pass - -CompressionAlgorithmIdentifier.componentType = namedtype.NamedTypes( - namedtype.NamedType('algorithmID-ShortForm', - AlgorithmID_ShortForm().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('algorithmID-OID', - univ.ObjectIdentifier().subtype(explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))) -) - - -class CompressedData(univ.Sequence): - pass - -CompressedData.componentType = namedtype.NamedTypes( - namedtype.NamedType('compressionAlgorithm', CompressionAlgorithmIdentifier()), - 
namedtype.NamedType('compressedContentInfo', CompressedContentInfo()) -) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8520.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8520.py deleted file mode 100644 index b9eb6e937786..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8520.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# Modified by Russ Housley to add maps for use with opentypes. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# X.509 Extensions for MUD URL and MUD Signer; -# Object Identifier for CMS Content Type for a MUD file -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8520.txt -# - -from pyasn1.type import char -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 -from pyasn1_modules import rfc5652 - - -# X.509 Extension for MUD URL - -id_pe_mud_url = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.25') - -class MUDURLSyntax(char.IA5String): - pass - - -# X.509 Extension for MUD Signer - -id_pe_mudsigner = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.30') - -class MUDsignerSyntax(rfc5280.Name): - pass - - -# Object Identifier for CMS Content Type for a MUD file - -id_ct_mudtype = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.41') - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_pe_mud_url: MUDURLSyntax(), - id_pe_mudsigner: MUDsignerSyntax(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) - - -# Map of Content Type OIDs to Content Types added to the -# ones that are in rfc5652.py - -_cmsContentTypesMapUpdate = { - id_ct_mudtype: univ.OctetString(), -} - -rfc5652.cmsContentTypesMap.update(_cmsContentTypesMapUpdate) diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8619.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8619.py deleted file mode 100644 index 0aaa811bad0e..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8619.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley. 
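For context: rfc8226.py, rfc8360.py and rfc8520.py, all removed above, extend rfc5280.certificateExtensionsMap so certificate extension values can be resolved by extnID. A minimal sketch (not taken from the vendored files) of that lookup, assuming pyasn1 and pyasn1-modules are importable; the helper name and fallback behavior are invented.

# Minimal sketch; the helper name and fallback behavior are illustrative.
from pyasn1.codec.der import decoder
from pyasn1_modules import rfc5280

def decode_extension(extension):
    """Decode an rfc5280.Extension's extnValue if its OID is registered."""
    spec = rfc5280.certificateExtensionsMap.get(extension['extnID'])
    if spec is None:
        return None  # unrecognized extension: leave the OCTET STRING opaque
    # extnValue is an OCTET STRING whose octets hold the DER-encoded value.
    value, rest = decoder.decode(extension['extnValue'], asn1Spec=spec)
    assert not rest
    return value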
-# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# Algorithm Identifiers for HKDF -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8619.txt -# - -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -# Object Identifiers - -id_alg_hkdf_with_sha256 = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.28') - - -id_alg_hkdf_with_sha384 = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.29') - - -id_alg_hkdf_with_sha512 = univ.ObjectIdentifier('1.2.840.113549.1.9.16.3.30') - - -# Key Derivation Algorithm Identifiers - -kda_hkdf_with_sha256 = rfc5280.AlgorithmIdentifier() -kda_hkdf_with_sha256['algorithm'] = id_alg_hkdf_with_sha256 -# kda_hkdf_with_sha256['parameters'] are absent - - -kda_hkdf_with_sha384 = rfc5280.AlgorithmIdentifier() -kda_hkdf_with_sha384['algorithm'] = id_alg_hkdf_with_sha384 -# kda_hkdf_with_sha384['parameters'] are absent - - -kda_hkdf_with_sha512 = rfc5280.AlgorithmIdentifier() -kda_hkdf_with_sha512['algorithm'] = id_alg_hkdf_with_sha512 -# kda_hkdf_with_sha512['parameters'] are absent diff --git a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8649.py b/third_party/python/pyasn1_modules/pyasn1_modules/rfc8649.py deleted file mode 100644 index c405f050e8e6..000000000000 --- a/third_party/python/pyasn1_modules/pyasn1_modules/rfc8649.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is part of pyasn1-modules software. -# -# Created by Russ Housley with assistance from asn1ate v.0.6.0. -# -# Copyright (c) 2019, Vigil Security, LLC -# License: http://snmplabs.com/pyasn1/license.html -# -# X.509 Certificate Extension for Hash Of Root Key -# -# ASN.1 source from: -# https://www.rfc-editor.org/rfc/rfc8649.txt -# - -from pyasn1.type import namedtype -from pyasn1.type import univ - -from pyasn1_modules import rfc5280 - - -id_ce_hashOfRootKey = univ.ObjectIdentifier('1.3.6.1.4.1.51483.2.1') - - -class HashedRootKey(univ.Sequence): - pass - -HashedRootKey.componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlg', rfc5280.AlgorithmIdentifier()), - namedtype.NamedType('hashValue', univ.OctetString()) -) - - -# Map of Certificate Extension OIDs to Extensions added to the -# ones that are in rfc5280.py - -_certificateExtensionsMapUpdate = { - id_ce_hashOfRootKey: HashedRootKey(), -} - -rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate) diff --git a/third_party/python/pylru/LICENSE.txt b/third_party/python/pylru/LICENSE.txt deleted file mode 100644 index d159169d1050..000000000000 --- a/third_party/python/pylru/LICENSE.txt +++ /dev/null @@ -1,339 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. 
- - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. 
- -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. 
(This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. 
- -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License along - with this program; if not, write to the Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, the commands you use may -be called something other than `show w' and `show c'; they could even be -mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - , 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into -proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. 
diff --git a/third_party/python/pylru/PKG-INFO b/third_party/python/pylru/PKG-INFO deleted file mode 100644 index d0e146d616dc..000000000000 --- a/third_party/python/pylru/PKG-INFO +++ /dev/null @@ -1,263 +0,0 @@ -Metadata-Version: 1.1 -Name: pylru -Version: 1.0.9 -Summary: A least recently used (LRU) cache implementation -Home-page: https://github.com/jlhutch/pylru -Author: Jay Hutchinson -Author-email: jlhutch+pylru@gmail.com -License: UNKNOWN -Description: - - PyLRU - ===== - - A least recently used (LRU) cache for Python. - - Introduction - ============ - - Pylru implements a true LRU cache along with several support classes. The cache is efficient and written in pure Python. It works with Python 2.6+ including the 3.x series. Basic operations (lookup, insert, delete) all run in a constant amount of time. Pylru provides a cache class with a simple dict interface. It also provides classes to wrap any object that has a dict interface with a cache. Both write-through and write-back semantics are supported. Pylru also provides classes to wrap functions in a similar way, including a function decorator. - - You can install pylru or you can just copy the source file pylru.py and use it directly in your own project. The rest of this file explains what the pylru module provides and how to use it. If you want to know more examine pylru.py. The code is straightforward and well commented. - - Usage - ===== - - lrucache - -------- - - An lrucache object has a dictionary like interface and can be used in the same way:: - - import pylru - - size = 100 # Size of the cache. The maximum number of key/value - # pairs you want the cache to hold. - - cache = pylru.lrucache(size) - # Create a cache object. - - value = cache[key] # Lookup a value given its key. - cache[key] = value # Insert a key/value pair. - del cache[key] # Delete a value given its key. - # - # These three operations affect the order of the cache. - # Lookup and insert both move the key/value to the most - # recently used position. Delete (obviously) removes a - # key/value from whatever position it was in. - - key in cache # Test for membership. Does not affect the cache order. - - value = cache.peek(key) - # Lookup a value given its key. Does not affect the - # cache order. - - cache.keys() # Return an iterator over the keys in the cache - cache.values() # Return an iterator over the values in the cache - cache.items() # Return an iterator over the (key, value) pairs in the - # cache. - # - # These calls have no effect on the cache order. - # lrucache is scan resistant when these calls are used. - # The iterators iterate over their respective elements - # in the order of most recently used to least recently - # used. - # - # WARNING - While these iterators do not affect the - # cache order the lookup, insert, and delete operations - # do. The result of changing the cache's order - # during iteration is undefined. If you really need to - # do something of the sort use list(cache.keys()), then - # loop over the list elements. - - for key in cache: # Caches support __iter__ so you can use them directly - pass # in a for loop to loop over the keys just like - # cache.keys() - - cache.size() # Returns the size of the cache - cache.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - x = len(cache) # Returns the number of items stored in the cache. - # x will be less than or equal to cache.size() - - cache.clear() # Remove all items from the cache. 
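For a concrete sense of the behaviour described above, here is a minimal sketch of the dict-style lrucache interface; the cache size, keys, and values are arbitrary placeholders chosen only for illustration::

    import pylru

    cache = pylru.lrucache(2)      # A cache that holds at most two key/value pairs.
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3                 # Third insert ejects the least recently used pair ('a').

    print('a' in cache)            # False -- 'a' was ejected to make room.
    print(cache.peek('b'))         # 2, looked up without touching the cache order.
    print(cache.size())            # 2, the maximum number of pairs the cache holds.

Because lookup and insert both move a pair to the most recently used position, which pair gets ejected depends on the access pattern, not just the insertion order.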
- - - Lrucache takes an optional callback function as a second argument. Since the cache has a fixed size, some operations (such as an insertion) may cause the least recently used key/value pair to be ejected. If the optional callback function is given it will be called when this occurs. For example:: - - import pylru - - def callback(key, value): - print (key, value) # A dumb callback that just prints the key/value - - size = 100 - cache = pylru.lrucache(size, callback) - - # Use the cache... When it gets full some pairs may be ejected due to - # the fixed cache size. But, not before the callback is called to let you - # know. - - WriteThroughCacheManager - ------------------------ - - Often a cache is used to speed up access to some other high latency object. For example, imagine you have a backend storage object that reads/writes from/to a remote server. Let us call this object *store*. If store has a dictionary interface a cache manager class can be used to compose the store object and an lrucache. The manager object exposes a dictionary interface. The programmer can then interact with the manager object as if it were the store. The manager object takes care of communicating with the store and caching key/value pairs in the lrucache object. - - Two different semantics are supported, write-through (WriteThroughCacheManager class) and write-back (WriteBackCacheManager class). With write-through, lookups from the store are cached for future lookups. Insertions and deletions are updated in the cache and written through to the store immediately. Write-back works the same way, but insertions are updated only in the cache. These "dirty" key/value pair will only be updated to the underlying store when they are ejected from the cache or when a sync is performed. The WriteBackCacheManager class is discussed more below. - - The WriteThroughCacheManager class takes as arguments the store object you want to compose and the cache size. It then creates an LRU cache and automatically manages it:: - - import pylru - - size = 100 - cached = pylru.WriteThroughCacheManager(store, size) - # Or - cached = pylru.lruwrap(store, size) - # This is a factory function that does the same thing. - - # Now the object *cached* can be used just like store, except caching is - # automatically handled. - - value = cached[key] # Lookup a value given its key. - cached[key] = value # Insert a key/value pair. - del cached[key] # Delete a value given its key. - - key in cache # Test for membership. Does not affect the cache order. - - cached.keys() # Returns store.keys() - cached.values() # Returns store.values() - cached.items() # Returns store.items() - # - # These calls have no effect on the cache order. - # The iterators iterate over their respective elements - # in the order dictated by store. - - for key in cached: # Same as store.keys() - - cached.size() # Returns the size of the cache - cached.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - x = len(cached) # Returns the number of items stored in the store. - - cached.clear() # Remove all items from the store and cache. - - - WriteBackCacheManager - --------------------- - - Similar to the WriteThroughCacheManager class except write-back semantics are used to manage the cache. The programmer is responsible for one more thing as well. They MUST call sync() when they are finished. This ensures that the last of the "dirty" entries in the cache are written back. 
This is not too bad as WriteBackCacheManager objects can be used in with statements. More about that below:: - - - import pylru - - size = 100 - cached = pylru.WriteBackCacheManager(store, size) - # Or - cached = pylru.lruwrap(store, size, True) - # This is a factory function that does the same thing. - - value = cached[key] # Lookup a value given its key. - cached[key] = value # Insert a key/value pair. - del cached[key] # Delete a value given its key. - - key in cache # Test for membership. Does not affect the cache order. - - - cached.keys() # Return an iterator over the keys in the cache/store - cached.values() # Return an iterator over the values in the cache/store - cached.items() # Return an iterator over the (key, value) pairs in the - # cache/store. - # - # The iterators iterate over a consistent view of the - # respective elements. That is, except for the order, - # the elements are the same as those returned if you - # first called sync() then called - # store.keys()[ or values() or items()] - # - # These calls have no effect on the cache order. - # The iterators iterate over their respective elements - # in arbitrary order. - # - # WARNING - While these iterators do not effect the - # cache order the lookup, insert, and delete operations - # do. The results of changing the cache's order - # during iteration is undefined. If you really need to - # do something of the sort use list(cached.keys()), - # then loop over the list elements. - - for key in cached: # Same as cached.keys() - - cached.size() # Returns the size of the cache - cached.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - cached.clear() # Remove all items from the store and cache. - - cached.sync() # Make the store and cache consistent. Write all - # cached changes to the store that have not been - # yet. - - cached.flush() # Calls sync() then clears the cache. - - - To help the programmer ensure that the final sync() is called, WriteBackCacheManager objects can be used in a with statement:: - - with pylru.WriteBackCacheManager(store, size) as cached: - # Use cached just like you would store. sync() is called automatically - # for you when leaving the with statement block. - - - FunctionCacheManager - --------------------- - - FunctionCacheManager allows you to compose a function with an lrucache. The resulting object can be called just like the original function, but the results are cached to speed up future calls. The fuction must have arguments that are hashable:: - - import pylru - - def square(x): - return x * x - - size = 100 - cached = pylru.FunctionCacheManager(square, size) - - y = cached(7) - - # The results of cached are the same as square, but automatically cached - # to speed up future calls. - - cached.size() # Returns the size of the cache - cached.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - cached.clear() # Remove all items from the cache. - - - - lrudecorator - ------------ - - PyLRU also provides a function decorator. This is basically the same functionality as FunctionCacheManager, but in the form of a decorator:: - - from pylru import lrudecorator - - @lrudecorator(100) - def square(x): - return x * x - - # The results of the square function are cached to speed up future calls. - - square.size() # Returns the size of the cache - square.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - square.clear() # Remove all items from the cache. 
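Putting the pieces above together, a small sketch of the decorator and of the write-back manager used in a with statement; the backing ``store`` here is a plain dict standing in for a slower dict-like object, and the function being cached is arbitrary::

    import pylru

    @pylru.lrudecorator(100)
    def fib(n):
        # Results are cached by argument, so repeated sub-calls are cheap.
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))                         # 832040

    store = {}                             # Any dict-like backing object would do here.
    with pylru.WriteBackCacheManager(store, 100) as cached:
        cached['answer'] = 42              # Held in the cache as a "dirty" entry.
    print(store['answer'])                 # 42 -- sync() ran when the block exited.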
- -Platform: UNKNOWN -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: GNU General Public License (GPL) -Classifier: Operating System :: OS Independent -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/third_party/python/pylru/README.txt b/third_party/python/pylru/README.txt deleted file mode 100644 index f37c9ccdca14..000000000000 --- a/third_party/python/pylru/README.txt +++ /dev/null @@ -1,245 +0,0 @@ - - -PyLRU -===== - -A least recently used (LRU) cache for Python. - -Introduction -============ - -Pylru implements a true LRU cache along with several support classes. The cache is efficient and written in pure Python. It works with Python 2.6+ including the 3.x series. Basic operations (lookup, insert, delete) all run in a constant amount of time. Pylru provides a cache class with a simple dict interface. It also provides classes to wrap any object that has a dict interface with a cache. Both write-through and write-back semantics are supported. Pylru also provides classes to wrap functions in a similar way, including a function decorator. - -You can install pylru or you can just copy the source file pylru.py and use it directly in your own project. The rest of this file explains what the pylru module provides and how to use it. If you want to know more examine pylru.py. The code is straightforward and well commented. - -Usage -===== - -lrucache --------- - -An lrucache object has a dictionary like interface and can be used in the same way:: - - import pylru - - size = 100 # Size of the cache. The maximum number of key/value - # pairs you want the cache to hold. - - cache = pylru.lrucache(size) - # Create a cache object. - - value = cache[key] # Lookup a value given its key. - cache[key] = value # Insert a key/value pair. - del cache[key] # Delete a value given its key. - # - # These three operations affect the order of the cache. - # Lookup and insert both move the key/value to the most - # recently used position. Delete (obviously) removes a - # key/value from whatever position it was in. - - key in cache # Test for membership. Does not affect the cache order. - - value = cache.peek(key) - # Lookup a value given its key. Does not affect the - # cache order. - - cache.keys() # Return an iterator over the keys in the cache - cache.values() # Return an iterator over the values in the cache - cache.items() # Return an iterator over the (key, value) pairs in the - # cache. - # - # These calls have no effect on the cache order. - # lrucache is scan resistant when these calls are used. - # The iterators iterate over their respective elements - # in the order of most recently used to least recently - # used. - # - # WARNING - While these iterators do not affect the - # cache order the lookup, insert, and delete operations - # do. The result of changing the cache's order - # during iteration is undefined. If you really need to - # do something of the sort use list(cache.keys()), then - # loop over the list elements. - - for key in cache: # Caches support __iter__ so you can use them directly - pass # in a for loop to loop over the keys just like - # cache.keys() - - cache.size() # Returns the size of the cache - cache.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. 
- - x = len(cache) # Returns the number of items stored in the cache. - # x will be less than or equal to cache.size() - - cache.clear() # Remove all items from the cache. - - -Lrucache takes an optional callback function as a second argument. Since the cache has a fixed size, some operations (such as an insertion) may cause the least recently used key/value pair to be ejected. If the optional callback function is given it will be called when this occurs. For example:: - - import pylru - - def callback(key, value): - print (key, value) # A dumb callback that just prints the key/value - - size = 100 - cache = pylru.lrucache(size, callback) - - # Use the cache... When it gets full some pairs may be ejected due to - # the fixed cache size. But, not before the callback is called to let you - # know. - -WriteThroughCacheManager ------------------------- - -Often a cache is used to speed up access to some other high latency object. For example, imagine you have a backend storage object that reads/writes from/to a remote server. Let us call this object *store*. If store has a dictionary interface a cache manager class can be used to compose the store object and an lrucache. The manager object exposes a dictionary interface. The programmer can then interact with the manager object as if it were the store. The manager object takes care of communicating with the store and caching key/value pairs in the lrucache object. - -Two different semantics are supported, write-through (WriteThroughCacheManager class) and write-back (WriteBackCacheManager class). With write-through, lookups from the store are cached for future lookups. Insertions and deletions are updated in the cache and written through to the store immediately. Write-back works the same way, but insertions are updated only in the cache. These "dirty" key/value pair will only be updated to the underlying store when they are ejected from the cache or when a sync is performed. The WriteBackCacheManager class is discussed more below. - -The WriteThroughCacheManager class takes as arguments the store object you want to compose and the cache size. It then creates an LRU cache and automatically manages it:: - - import pylru - - size = 100 - cached = pylru.WriteThroughCacheManager(store, size) - # Or - cached = pylru.lruwrap(store, size) - # This is a factory function that does the same thing. - - # Now the object *cached* can be used just like store, except caching is - # automatically handled. - - value = cached[key] # Lookup a value given its key. - cached[key] = value # Insert a key/value pair. - del cached[key] # Delete a value given its key. - - key in cache # Test for membership. Does not affect the cache order. - - cached.keys() # Returns store.keys() - cached.values() # Returns store.values() - cached.items() # Returns store.items() - # - # These calls have no effect on the cache order. - # The iterators iterate over their respective elements - # in the order dictated by store. - - for key in cached: # Same as store.keys() - - cached.size() # Returns the size of the cache - cached.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - x = len(cached) # Returns the number of items stored in the store. - - cached.clear() # Remove all items from the store and cache. - - -WriteBackCacheManager ---------------------- - -Similar to the WriteThroughCacheManager class except write-back semantics are used to manage the cache. The programmer is responsible for one more thing as well. 
They MUST call sync() when they are finished. This ensures that the last of the "dirty" entries in the cache are written back. This is not too bad as WriteBackCacheManager objects can be used in with statements. More about that below:: - - - import pylru - - size = 100 - cached = pylru.WriteBackCacheManager(store, size) - # Or - cached = pylru.lruwrap(store, size, True) - # This is a factory function that does the same thing. - - value = cached[key] # Lookup a value given its key. - cached[key] = value # Insert a key/value pair. - del cached[key] # Delete a value given its key. - - key in cache # Test for membership. Does not affect the cache order. - - - cached.keys() # Return an iterator over the keys in the cache/store - cached.values() # Return an iterator over the values in the cache/store - cached.items() # Return an iterator over the (key, value) pairs in the - # cache/store. - # - # The iterators iterate over a consistent view of the - # respective elements. That is, except for the order, - # the elements are the same as those returned if you - # first called sync() then called - # store.keys()[ or values() or items()] - # - # These calls have no effect on the cache order. - # The iterators iterate over their respective elements - # in arbitrary order. - # - # WARNING - While these iterators do not effect the - # cache order the lookup, insert, and delete operations - # do. The results of changing the cache's order - # during iteration is undefined. If you really need to - # do something of the sort use list(cached.keys()), - # then loop over the list elements. - - for key in cached: # Same as cached.keys() - - cached.size() # Returns the size of the cache - cached.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - cached.clear() # Remove all items from the store and cache. - - cached.sync() # Make the store and cache consistent. Write all - # cached changes to the store that have not been - # yet. - - cached.flush() # Calls sync() then clears the cache. - - -To help the programmer ensure that the final sync() is called, WriteBackCacheManager objects can be used in a with statement:: - - with pylru.WriteBackCacheManager(store, size) as cached: - # Use cached just like you would store. sync() is called automatically - # for you when leaving the with statement block. - - -FunctionCacheManager ---------------------- - -FunctionCacheManager allows you to compose a function with an lrucache. The resulting object can be called just like the original function, but the results are cached to speed up future calls. The fuction must have arguments that are hashable:: - - import pylru - - def square(x): - return x * x - - size = 100 - cached = pylru.FunctionCacheManager(square, size) - - y = cached(7) - - # The results of cached are the same as square, but automatically cached - # to speed up future calls. - - cached.size() # Returns the size of the cache - cached.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - cached.clear() # Remove all items from the cache. - - - -lrudecorator ------------- - -PyLRU also provides a function decorator. This is basically the same functionality as FunctionCacheManager, but in the form of a decorator:: - - from pylru import lrudecorator - - @lrudecorator(100) - def square(x): - return x * x - - # The results of the square function are cached to speed up future calls. 
- - square.size() # Returns the size of the cache - square.size(x) # Changes the size of the cache. x MUST be greater than - # zero. Returns the new size x. - - square.clear() # Remove all items from the cache. diff --git a/third_party/python/pylru/setup.py b/third_party/python/pylru/setup.py deleted file mode 100644 index 66d441ca9454..000000000000 --- a/third_party/python/pylru/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -from distutils.core import setup - -setup( - name = "pylru", - version = "1.0.9", - py_modules=['pylru'], - description = "A least recently used (LRU) cache implementation", - author = "Jay Hutchinson", - author_email = "jlhutch+pylru@gmail.com", - url = "https://github.com/jlhutch/pylru", - classifiers = [ - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: GNU General Public License (GPL)", - "Operating System :: OS Independent", - "Topic :: Software Development :: Libraries :: Python Modules", - ], - long_description=open('README.txt').read()) - - diff --git a/third_party/python/pystache/.gitignore b/third_party/python/pystache/.gitignore new file mode 100644 index 000000000000..758d62df92e2 --- /dev/null +++ b/third_party/python/pystache/.gitignore @@ -0,0 +1,17 @@ +*.pyc +.DS_Store +# Tox support. See: http://pypi.python.org/pypi/tox +.tox +# Our tox runs convert the doctests in *.rst files to Python 3 prior to +# running tests. Ignore these temporary files. +*.temp2to3.rst +# The setup.py "prep" command converts *.md to *.temp.rst (via *.temp.md). +*.temp.md +*.temp.rst +# TextMate project file +*.tmproj +# Distribution-related folders and files. +build +dist +MANIFEST +pystache.egg-info diff --git a/third_party/python/pystache/.gitmodules b/third_party/python/pystache/.gitmodules new file mode 100644 index 000000000000..c55c8e5e3b76 --- /dev/null +++ b/third_party/python/pystache/.gitmodules @@ -0,0 +1,3 @@ +[submodule "ext/spec"] + path = ext/spec + url = http://github.com/mustache/spec.git diff --git a/third_party/python/pystache/.travis.yml b/third_party/python/pystache/.travis.yml new file mode 100644 index 000000000000..00227053aae8 --- /dev/null +++ b/third_party/python/pystache/.travis.yml @@ -0,0 +1,14 @@ +language: python + +# Travis CI has no plans to support Jython and no longer supports Python 2.5. +python: + - 2.6 + - 2.7 + - 3.2 + - pypy + +script: + - python setup.py install + # Include the spec tests directory for Mustache spec tests and the + # project directory for doctests. + - pystache-test . ext/spec/specs diff --git a/third_party/python/pystache/PKG-INFO b/third_party/python/pystache/PKG-INFO deleted file mode 100644 index 92e0c7f8ed24..000000000000 --- a/third_party/python/pystache/PKG-INFO +++ /dev/null @@ -1,536 +0,0 @@ -Metadata-Version: 1.1 -Name: pystache -Version: 0.5.4 -Summary: Mustache for Python -Home-page: http://github.com/defunkt/pystache -Author: Chris Jerdonek -Author-email: chris.jerdonek@gmail.com -License: MIT -Description: .. Do not edit this file. This file is auto-generated for PyPI by setup.py - .. using pandoc, so edits should go in the source files rather than here. - - Pystache - ======== - - .. figure:: http://defunkt.github.com/pystache/images/logo_phillips.png - :alt: mustachioed, monocled snake by David Phillips - - .. 
figure:: https://secure.travis-ci.org/defunkt/pystache.png - :alt: Travis CI current build status - - `Pystache `__ is a Python - implementation of `Mustache `__. Mustache - is a framework-agnostic, logic-free templating system inspired by - `ctemplate `__ and - `et `__. - Like ctemplate, Mustache "emphasizes separating logic from presentation: - it is impossible to embed application logic in this template language." - - The `mustache(5) `__ man - page provides a good introduction to Mustache's syntax. For a more - complete (and more current) description of Mustache's behavior, see the - official `Mustache spec `__. - - Pystache is `semantically versioned `__ and can be - found on `PyPI `__. This version - of Pystache passes all tests in `version - 1.1.2 `__ of the spec. - - Requirements - ------------ - - Pystache is tested with-- - - - Python 2.4 (requires simplejson `version - 2.0.9 `__ or earlier) - - Python 2.5 (requires - `simplejson `__) - - Python 2.6 - - Python 2.7 - - Python 3.1 - - Python 3.2 - - Python 3.3 - - `PyPy `__ - - `Distribute `__ (the setuptools - fork) is recommended over - `setuptools `__, and is required - in some cases (e.g. for Python 3 support). If you use - `pip `__, you probably already satisfy - this requirement. - - JSON support is needed only for the command-line interface and to run - the spec tests. We require simplejson for earlier versions of Python - since Python's `json `__ - module was added in Python 2.6. - - For Python 2.4 we require an earlier version of simplejson since - simplejson stopped officially supporting Python 2.4 in simplejson - version 2.1.0. Earlier versions of simplejson can be installed manually, - as follows: - - :: - - pip install 'simplejson<2.1.0' - - Official support for Python 2.4 will end with Pystache version 0.6.0. - - Install It - ---------- - - :: - - pip install pystache - - And test it-- - - :: - - pystache-test - - To install and test from source (e.g. from GitHub), see the Develop - section. - - Use It - ------ - - :: - - >>> import pystache - >>> print pystache.render('Hi {{person}}!', {'person': 'Mom'}) - Hi Mom! - - You can also create dedicated view classes to hold your view logic. - - Here's your view class (in .../examples/readme.py): - - :: - - class SayHello(object): - def to(self): - return "Pizza" - - Instantiating like so: - - :: - - >>> from pystache.tests.examples.readme import SayHello - >>> hello = SayHello() - - Then your template, say\_hello.mustache (by default in the same - directory as your class definition): - - :: - - Hello, {{to}}! - - Pull it together: - - :: - - >>> renderer = pystache.Renderer() - >>> print renderer.render(hello) - Hello, Pizza! - - For greater control over rendering (e.g. to specify a custom template - directory), use the ``Renderer`` class like above. One can pass - attributes to the Renderer class constructor or set them on a Renderer - instance. To customize template loading on a per-view basis, subclass - ``TemplateSpec``. See the docstrings of the - `Renderer `__ - class and - `TemplateSpec `__ - class for more information. - - You can also pre-parse a template: - - :: - - >>> parsed = pystache.parse(u"Hey {{#who}}{{.}}!{{/who}}") - >>> print parsed - [u'Hey ', _SectionNode(key=u'who', index_begin=12, index_end=18, parsed=[_EscapeNode(key=u'.'), u'!'])] - - And then: - - :: - - >>> print renderer.render(parsed, {'who': 'Pops'}) - Hey Pops! - >>> print renderer.render(parsed, {'who': 'you'}) - Hey you! 
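As a further illustration of the ``Renderer`` class mentioned above, a brief sketch using a dictionary as the partial loader; the ``greeting`` partial name and the template text are invented for this example::

    import pystache

    partials = {'greeting': 'Hi {{name}}!'}      # A dict serving as the partial loader.
    renderer = pystache.Renderer(partials=partials)

    # {{> greeting}} is resolved against the mapping above before rendering.
    print(renderer.render('{{> greeting}} Welcome back.', {'name': 'Mom'}))
    # -> Hi Mom! Welcome back.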
- - Python 3 - -------- - - Pystache has supported Python 3 since version 0.5.1. Pystache behaves - slightly differently between Python 2 and 3, as follows: - - - In Python 2, the default html-escape function ``cgi.escape()`` does - not escape single quotes. In Python 3, the default escape function - ``html.escape()`` does escape single quotes. - - In both Python 2 and 3, the string and file encodings default to - ``sys.getdefaultencoding()``. However, this function can return - different values under Python 2 and 3, even when run from the same - system. Check your own system for the behavior on your system, or do - not rely on the defaults by passing in the encodings explicitly (e.g. - to the ``Renderer`` class). - - Unicode - ------- - - This section describes how Pystache handles unicode, strings, and - encodings. - - Internally, Pystache uses `only unicode - strings `__ - (``str`` in Python 3 and ``unicode`` in Python 2). For input, Pystache - accepts both unicode strings and byte strings (``bytes`` in Python 3 and - ``str`` in Python 2). For output, Pystache's template rendering methods - return only unicode. - - Pystache's ``Renderer`` class supports a number of attributes to control - how Pystache converts byte strings to unicode on input. These include - the ``file_encoding``, ``string_encoding``, and ``decode_errors`` - attributes. - - The ``file_encoding`` attribute is the encoding the renderer uses to - convert to unicode any files read from the file system. Similarly, - ``string_encoding`` is the encoding the renderer uses to convert any - other byte strings encountered during the rendering process into unicode - (e.g. context values that are encoded byte strings). - - The ``decode_errors`` attribute is what the renderer passes as the - ``errors`` argument to Python's built-in unicode-decoding function - (``str()`` in Python 3 and ``unicode()`` in Python 2). The valid values - for this argument are ``strict``, ``ignore``, and ``replace``. - - Each of these attributes can be set via the ``Renderer`` class's - constructor using a keyword argument of the same name. See the Renderer - class's docstrings for further details. In addition, the - ``file_encoding`` attribute can be controlled on a per-view basis by - subclassing the ``TemplateSpec`` class. When not specified explicitly, - these attributes default to values set in Pystache's ``defaults`` - module. - - Develop - ------- - - To test from a source distribution (without installing)-- - - :: - - python test_pystache.py - - To test Pystache with multiple versions of Python (with a single - command!), you can use `tox `__: - - :: - - pip install 'virtualenv<1.8' # Version 1.8 dropped support for Python 2.4. - pip install 'tox<1.4' # Version 1.4 dropped support for Python 2.4. - tox - - If you do not have all Python versions listed in ``tox.ini``-- - - :: - - tox -e py26,py32 # for example - - The source distribution tests also include doctests and tests from the - Mustache spec. To include tests from the Mustache spec in your test - runs: - - :: - - git submodule init - git submodule update - - The test harness parses the spec's (more human-readable) yaml files if - `PyYAML `__ is present. Otherwise, - it parses the json files. 
To install PyYAML-- - - :: - - pip install pyyaml - - To run a subset of the tests, you can use - `nose `__: - - :: - - pip install nose - nosetests --tests pystache/tests/test_context.py:GetValueTests.test_dictionary__key_present - - Using Python 3 with Pystache from source - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Pystache is written in Python 2 and must be converted to Python 3 prior - to using it with Python 3. The installation process (and tox) do this - automatically. - - To convert the code to Python 3 manually (while using Python 3)-- - - :: - - python setup.py build - - This writes the converted code to a subdirectory called ``build``. By - design, Python 3 builds - `cannot `__ - be created from Python 2. - - To convert the code without using setup.py, you can use - `2to3 `__ as follows (two - steps)-- - - :: - - 2to3 --write --nobackups --no-diffs --doctests_only pystache - 2to3 --write --nobackups --no-diffs pystache - - This converts the code (and doctests) in place. - - To ``import pystache`` from a source distribution while using Python 3, - be sure that you are importing from a directory containing a converted - version of the code (e.g. from the ``build`` directory after - converting), and not from the original (unconverted) source directory. - Otherwise, you will get a syntax error. You can help prevent this by not - running the Python IDE from the project directory when importing - Pystache while using Python 3. - - Mailing List - ------------ - - There is a `mailing list `__. - Note that there is a bit of a delay between posting a message and seeing - it appear in the mailing list archive. - - Credits - ------- - - :: - - >>> context = { 'author': 'Chris Wanstrath', 'maintainer': 'Chris Jerdonek' } - >>> print pystache.render("Author: {{author}}\nMaintainer: {{maintainer}}", context) - Author: Chris Wanstrath - Maintainer: Chris Jerdonek - - Pystache logo by `David Phillips `__ is - licensed under a `Creative Commons Attribution-ShareAlike 3.0 Unported - License `__. - |image0| - - History - ======= - - **Note:** Official support for Python 2.4 will end with Pystache version - 0.6.0. - - 0.5.4 (2014-07-11) - ------------------ - - - Bugfix: made test with filenames OS agnostic (issue #162). - - 0.5.3 (2012-11-03) - ------------------ - - - Added ability to customize string coercion (e.g. to have None render - as ``''``) (issue #130). - - Added Renderer.render\_name() to render a template by name (issue - #122). - - Added TemplateSpec.template\_path to specify an absolute path to a - template (issue #41). - - Added option of raising errors on missing tags/partials: - ``Renderer(missing_tags='strict')`` (issue #110). - - Added support for finding and loading templates by file name in - addition to by template name (issue #127). [xgecko] - - Added a ``parse()`` function that yields a printable, pre-compiled - parse tree. - - Added support for rendering pre-compiled templates. - - Added Python 3.3 to the list of supported versions. - - Added support for `PyPy `__ (issue #125). - - Added support for `Travis CI `__ (issue #124). - [msabramo] - - Bugfix: ``defaults.DELIMITERS`` can now be changed at runtime (issue - #135). [bennoleslie] - - Bugfix: exceptions raised from a property are no longer swallowed - when getting a key from a context stack (issue #110). - - Bugfix: lambda section values can now return non-ascii, non-unicode - strings (issue #118). - - Bugfix: allow ``test_pystache.py`` and ``tox`` to pass when run from - a downloaded sdist (i.e. 
without the spec test directory). - - Convert HISTORY and README files from reST to Markdown. - - More robust handling of byte strings in Python 3. - - Added Creative Commons license for David Phillips's logo. - - 0.5.2 (2012-05-03) - ------------------ - - - Added support for dot notation and version 1.1.2 of the spec (issue - #99). [rbp] - - Missing partials now render as empty string per latest version of - spec (issue #115). - - Bugfix: falsey values now coerced to strings using str(). - - Bugfix: lambda return values for sections no longer pushed onto - context stack (issue #113). - - Bugfix: lists of lambdas for sections were not rendered (issue #114). - - 0.5.1 (2012-04-24) - ------------------ - - - Added support for Python 3.1 and 3.2. - - Added tox support to test multiple Python versions. - - Added test script entry point: pystache-test. - - Added \_\_version\_\_ package attribute. - - Test harness now supports both YAML and JSON forms of Mustache spec. - - Test harness no longer requires nose. - - 0.5.0 (2012-04-03) - ------------------ - - This version represents a major rewrite and refactoring of the code base - that also adds features and fixes many bugs. All functionality and - nearly all unit tests have been preserved. However, some backwards - incompatible changes to the API have been made. - - Below is a selection of some of the changes (not exhaustive). - - Highlights: - - - Pystache now passes all tests in version 1.0.3 of the `Mustache - spec `__. [pvande] - - Removed View class: it is no longer necessary to subclass from View - or from any other class to create a view. - - Replaced Template with Renderer class: template rendering behavior - can be modified via the Renderer constructor or by setting attributes - on a Renderer instance. - - Added TemplateSpec class: template rendering can be specified on a - per-view basis by subclassing from TemplateSpec. - - Introduced separation of concerns and removed circular dependencies - (e.g. between Template and View classes, cf. `issue - #13 `__). - - Unicode now used consistently throughout the rendering process. - - Expanded test coverage: nosetests now runs doctests and ~105 test - cases from the Mustache spec (increasing the number of tests from 56 - to ~315). - - Added a rudimentary benchmarking script to gauge performance while - refactoring. - - Extensive documentation added (e.g. docstrings). - - Other changes: - - - Added a command-line interface. [vrde] - - The main rendering class now accepts a custom partial loader (e.g. a - dictionary) and a custom escape function. - - Non-ascii characters in str strings are now supported while - rendering. - - Added string encoding, file encoding, and errors options for decoding - to unicode. - - Removed the output encoding option. - - Removed the use of markupsafe. - - Bug fixes: - - - Context values no longer processed as template strings. - [jakearchibald] - - Whitespace surrounding sections is no longer altered, per the spec. - [heliodor] - - Zeroes now render correctly when using PyPy. [alex] - - Multline comments now permitted. [fczuardi] - - Extensionless template files are now supported. - - Passing ``**kwargs`` to ``Template()`` no longer modifies the - context. - - Passing ``**kwargs`` to ``Template()`` with no context no longer - raises an exception. - - 0.4.1 (2012-03-25) - ------------------ - - - Added support for Python 2.4. 
[wangtz, jvantuyl] - - 0.4.0 (2011-01-12) - ------------------ - - - Add support for nested contexts (within template and view) - - Add support for inverted lists - - Decoupled template loading - - 0.3.1 (2010-05-07) - ------------------ - - - Fix package - - 0.3.0 (2010-05-03) - ------------------ - - - View.template\_path can now hold a list of path - - Add {{& blah}} as an alias for {{{ blah }}} - - Higher Order Sections - - Inverted sections - - 0.2.0 (2010-02-15) - ------------------ - - - Bugfix: Methods returning False or None are not rendered - - Bugfix: Don't render an empty string when a tag's value is 0. - [enaeseth] - - Add support for using non-callables as View attributes. - [joshthecoder] - - Allow using View instances as attributes. [joshthecoder] - - Support for Unicode and non-ASCII-encoded bytestring output. - [enaeseth] - - Template file encoding awareness. [enaeseth] - - 0.1.1 (2009-11-13) - ------------------ - - - Ensure we're dealing with strings, always - - Tests can be run by executing the test file directly - - 0.1.0 (2009-11-12) - ------------------ - - - First release - - License - ======= - - Copyright (C) 2012 Chris Jerdonek. All rights reserved. - - Copyright (c) 2009 Chris Wanstrath - - Permission is hereby granted, free of charge, to any person obtaining a - copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be included - in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - .. 
|image0| image:: http://i.creativecommons.org/l/by-sa/3.0/88x31.png - -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.4 -Classifier: Programming Language :: Python :: 2.5 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.1 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: Implementation :: PyPy diff --git a/third_party/python/pystache/gh/images/logo_phillips.png b/third_party/python/pystache/gh/images/logo_phillips.png new file mode 100644 index 000000000000..749190136638 Binary files /dev/null and b/third_party/python/pystache/gh/images/logo_phillips.png differ diff --git a/third_party/python/pystache/pystache/parser.py b/third_party/python/pystache/pystache/parser.py index c6a171f0743d..9a4fba235b07 100644 --- a/third_party/python/pystache/pystache/parser.py +++ b/third_party/python/pystache/pystache/parser.py @@ -43,7 +43,7 @@ def parse(template, delimiters=None): def _compile_template_re(delimiters): """ - Return a regular expresssion object (re.RegexObject) instance. + Return a regular expression object (re.RegexObject) instance. """ # The possible tag type characters following the opening tag, diff --git a/third_party/python/pystache/pystache/tests/__init__.py b/third_party/python/pystache/pystache/tests/__init__.py deleted file mode 100644 index a0d386a38c98..000000000000 --- a/third_party/python/pystache/pystache/tests/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -TODO: add a docstring. - -""" diff --git a/third_party/python/pystache/pystache/tests/benchmark.py b/third_party/python/pystache/pystache/tests/benchmark.py deleted file mode 100755 index d46e973327c6..000000000000 --- a/third_party/python/pystache/pystache/tests/benchmark.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -""" -A rudimentary backward- and forward-compatible script to benchmark pystache. - -Usage: - -tests/benchmark.py 10000 - -""" - -import sys -from timeit import Timer - -import pystache - -# TODO: make the example realistic. - -examples = [ - # Test case: 1 - ("""{{#person}}Hi {{name}}{{/person}}""", - {"person": {"name": "Jon"}}, - "Hi Jon"), - - # Test case: 2 - ("""\ -
    -

    {{header}}

    -
      -{{#comments}}
    • -
      {{name}}

      {{body}}

      -
    • {{/comments}} -
    -
    """, - {'header': "My Post Comments", - 'comments': [ - {'name': "Joe", 'body': "Thanks for this post!"}, - {'name': "Sam", 'body': "Thanks for this post!"}, - {'name': "Heather", 'body': "Thanks for this post!"}, - {'name': "Kathy", 'body': "Thanks for this post!"}, - {'name': "George", 'body': "Thanks for this post!"}]}, - """\ -
-<div class="comments">
-<h3>My Post Comments</h3>
-<ul>
-<li class="comment">
-<h6>Joe</h6>
-<div class="comment-body">Thanks for this post!</div>
-</li>
-<li class="comment">
-<h6>Sam</h6>
-<div class="comment-body">Thanks for this post!</div>
-</li>
-<li class="comment">
-<h6>Heather</h6>
-<div class="comment-body">Thanks for this post!</div>
-</li>
-<li class="comment">
-<h6>Kathy</h6>
-<div class="comment-body">Thanks for this post!</div>
-</li>
-<li class="comment">
-<h6>George</h6>
-<div class="comment-body">Thanks for this post!</div>
-</li>
-</ul>
-</div>
    """), -] - - -def make_test_function(example): - - template, context, expected = example - - def test(): - actual = pystache.render(template, context) - if actual != expected: - raise Exception("Benchmark mismatch: \n%s\n*** != ***\n%s" % (expected, actual)) - - return test - - -def main(sys_argv): - args = sys_argv[1:] - count = int(args[0]) - - print "Benchmarking: %sx" % count - print - - for example in examples: - - test = make_test_function(example) - - t = Timer(test,) - print min(t.repeat(repeat=3, number=count)) - - print "Done" - - -if __name__ == '__main__': - main(sys.argv) - diff --git a/third_party/python/pystache/pystache/tests/common.py b/third_party/python/pystache/pystache/tests/common.py deleted file mode 100644 index 222e14f23852..000000000000 --- a/third_party/python/pystache/pystache/tests/common.py +++ /dev/null @@ -1,237 +0,0 @@ -# coding: utf-8 - -""" -Provides test-related code that can be used by all tests. - -""" - -import os - -import pystache -from pystache import defaults -from pystache.tests import examples - -# Save a reference to the original function to avoid recursion. -_DEFAULT_TAG_ESCAPE = defaults.TAG_ESCAPE -_TESTS_DIR = os.path.dirname(pystache.tests.__file__) - -DATA_DIR = os.path.join(_TESTS_DIR, 'data') # i.e. 'pystache/tests/data'. -EXAMPLES_DIR = os.path.dirname(examples.__file__) -PACKAGE_DIR = os.path.dirname(pystache.__file__) -PROJECT_DIR = os.path.join(PACKAGE_DIR, '..') -# TEXT_DOCTEST_PATHS: the paths to text files (i.e. non-module files) -# containing doctests. The paths should be relative to the project directory. -TEXT_DOCTEST_PATHS = ['README.md'] - -UNITTEST_FILE_PREFIX = "test_" - - -def get_spec_test_dir(project_dir): - return os.path.join(project_dir, 'ext', 'spec', 'specs') - - -def html_escape(u): - """ - An html escape function that behaves the same in both Python 2 and 3. - - This function is needed because single quotes are escaped in Python 3 - (to '''), but not in Python 2. - - The global defaults.TAG_ESCAPE can be set to this function in the - setUp() and tearDown() of unittest test cases, for example, for - consistent test results. - - """ - u = _DEFAULT_TAG_ESCAPE(u) - return u.replace("'", ''') - - -def get_data_path(file_name=None): - """Return the path to a file in the test data directory.""" - if file_name is None: - file_name = "" - return os.path.join(DATA_DIR, file_name) - - -# Functions related to get_module_names(). - -def _find_files(root_dir, should_include): - """ - Return a list of paths to all modules below the given directory. - - Arguments: - - should_include: a function that accepts a file path and returns True or False. - - """ - paths = [] # Return value. - - is_module = lambda path: path.endswith(".py") - - # os.walk() is new in Python 2.3 - # http://docs.python.org/library/os.html#os.walk - for dir_path, dir_names, file_names in os.walk(root_dir): - new_paths = [os.path.join(dir_path, file_name) for file_name in file_names] - new_paths = filter(is_module, new_paths) - new_paths = filter(should_include, new_paths) - paths.extend(new_paths) - - return paths - - -def _make_module_names(package_dir, paths): - """ - Return a list of fully-qualified module names given a list of module paths. 
- - """ - package_dir = os.path.abspath(package_dir) - package_name = os.path.split(package_dir)[1] - - prefix_length = len(package_dir) - - module_names = [] - for path in paths: - path = os.path.abspath(path) # for example /subpackage/module.py - rel_path = path[prefix_length:] # for example /subpackage/module.py - rel_path = os.path.splitext(rel_path)[0] # for example /subpackage/module - - parts = [] - while True: - (rel_path, tail) = os.path.split(rel_path) - if not tail: - break - parts.insert(0, tail) - # We now have, for example, ['subpackage', 'module']. - parts.insert(0, package_name) - module = ".".join(parts) - module_names.append(module) - - return module_names - - -def get_module_names(package_dir=None, should_include=None): - """ - Return a list of fully-qualified module names in the given package. - - """ - if package_dir is None: - package_dir = PACKAGE_DIR - - if should_include is None: - should_include = lambda path: True - - paths = _find_files(package_dir, should_include) - names = _make_module_names(package_dir, paths) - names.sort() - - return names - - -class AssertStringMixin: - - """A unittest.TestCase mixin to check string equality.""" - - def assertString(self, actual, expected, format=None): - """ - Assert that the given strings are equal and have the same type. - - Arguments: - - format: a format string containing a single conversion specifier %s. - Defaults to "%s". - - """ - if format is None: - format = "%s" - - # Show both friendly and literal versions. - details = """String mismatch: %%s - - Expected: \"""%s\""" - Actual: \"""%s\""" - - Expected: %s - Actual: %s""" % (expected, actual, repr(expected), repr(actual)) - - def make_message(reason): - description = details % reason - return format % description - - self.assertEqual(actual, expected, make_message("different characters")) - - reason = "types different: %s != %s (actual)" % (repr(type(expected)), repr(type(actual))) - self.assertEqual(type(expected), type(actual), make_message(reason)) - - -class AssertIsMixin: - - """A unittest.TestCase mixin adding assertIs().""" - - # unittest.assertIs() is not available until Python 2.7: - # http://docs.python.org/library/unittest.html#unittest.TestCase.assertIsNone - def assertIs(self, first, second): - self.assertTrue(first is second, msg="%s is not %s" % (repr(first), repr(second))) - - -class AssertExceptionMixin: - - """A unittest.TestCase mixin adding assertException().""" - - # unittest.assertRaisesRegexp() is not available until Python 2.7: - # http://docs.python.org/library/unittest.html#unittest.TestCase.assertRaisesRegexp - def assertException(self, exception_type, msg, callable, *args, **kwds): - try: - callable(*args, **kwds) - raise Exception("Expected exception: %s: %s" % (exception_type, repr(msg))) - except exception_type, err: - self.assertEqual(str(err), msg) - - -class SetupDefaults(object): - - """ - Mix this class in to a unittest.TestCase for standard defaults. - - This class allows for consistent test results across Python 2/3. 
- - """ - - def setup_defaults(self): - self.original_decode_errors = defaults.DECODE_ERRORS - self.original_file_encoding = defaults.FILE_ENCODING - self.original_string_encoding = defaults.STRING_ENCODING - - defaults.DECODE_ERRORS = 'strict' - defaults.FILE_ENCODING = 'ascii' - defaults.STRING_ENCODING = 'ascii' - - def teardown_defaults(self): - defaults.DECODE_ERRORS = self.original_decode_errors - defaults.FILE_ENCODING = self.original_file_encoding - defaults.STRING_ENCODING = self.original_string_encoding - - -class Attachable(object): - """ - A class that attaches all constructor named parameters as attributes. - - For example-- - - >>> obj = Attachable(foo=42, size="of the universe") - >>> repr(obj) - "Attachable(foo=42, size='of the universe')" - >>> obj.foo - 42 - >>> obj.size - 'of the universe' - - """ - def __init__(self, **kwargs): - self.__args__ = kwargs - for arg, value in kwargs.iteritems(): - setattr(self, arg, value) - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, - ", ".join("%s=%s" % (k, repr(v)) - for k, v in self.__args__.iteritems())) diff --git a/third_party/python/pystache/pystache/tests/data/__init__.py b/third_party/python/pystache/pystache/tests/data/__init__.py deleted file mode 100644 index a0d386a38c98..000000000000 --- a/third_party/python/pystache/pystache/tests/data/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -TODO: add a docstring. - -""" diff --git a/third_party/python/pystache/pystache/tests/data/ascii.mustache b/third_party/python/pystache/pystache/tests/data/ascii.mustache deleted file mode 100644 index e86737ba001c..000000000000 --- a/third_party/python/pystache/pystache/tests/data/ascii.mustache +++ /dev/null @@ -1 +0,0 @@ -ascii: abc \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/data/duplicate.mustache b/third_party/python/pystache/pystache/tests/data/duplicate.mustache deleted file mode 100644 index a0515e39151b..000000000000 --- a/third_party/python/pystache/pystache/tests/data/duplicate.mustache +++ /dev/null @@ -1 +0,0 @@ -This file is used to test locate_path()'s search order. \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/data/locator/__init__.py b/third_party/python/pystache/pystache/tests/data/locator/__init__.py deleted file mode 100644 index a0d386a38c98..000000000000 --- a/third_party/python/pystache/pystache/tests/data/locator/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -TODO: add a docstring. - -""" diff --git a/third_party/python/pystache/pystache/tests/data/locator/duplicate.mustache b/third_party/python/pystache/pystache/tests/data/locator/duplicate.mustache deleted file mode 100644 index a0515e39151b..000000000000 --- a/third_party/python/pystache/pystache/tests/data/locator/duplicate.mustache +++ /dev/null @@ -1 +0,0 @@ -This file is used to test locate_path()'s search order. 
\ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/data/locator/template.txt b/third_party/python/pystache/pystache/tests/data/locator/template.txt deleted file mode 100644 index bef81609d098..000000000000 --- a/third_party/python/pystache/pystache/tests/data/locator/template.txt +++ /dev/null @@ -1 +0,0 @@ -Test template file diff --git a/third_party/python/pystache/pystache/tests/data/non_ascii.mustache b/third_party/python/pystache/pystache/tests/data/non_ascii.mustache deleted file mode 100644 index bd69b61b4c7b..000000000000 --- a/third_party/python/pystache/pystache/tests/data/non_ascii.mustache +++ /dev/null @@ -1 +0,0 @@ -non-ascii: é \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/data/sample_view.mustache b/third_party/python/pystache/pystache/tests/data/sample_view.mustache deleted file mode 100644 index e86737ba001c..000000000000 --- a/third_party/python/pystache/pystache/tests/data/sample_view.mustache +++ /dev/null @@ -1 +0,0 @@ -ascii: abc \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/data/say_hello.mustache b/third_party/python/pystache/pystache/tests/data/say_hello.mustache deleted file mode 100644 index 84ab4c9921f9..000000000000 --- a/third_party/python/pystache/pystache/tests/data/say_hello.mustache +++ /dev/null @@ -1 +0,0 @@ -Hello, {{to}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/data/views.py b/third_party/python/pystache/pystache/tests/data/views.py deleted file mode 100644 index 0b963093c85b..000000000000 --- a/third_party/python/pystache/pystache/tests/data/views.py +++ /dev/null @@ -1,21 +0,0 @@ -# coding: utf-8 - -""" -TODO: add a docstring. - -""" - -from pystache import TemplateSpec - -class SayHello(object): - - def to(self): - return "World" - - -class SampleView(TemplateSpec): - pass - - -class NonAscii(TemplateSpec): - pass diff --git a/third_party/python/pystache/pystache/tests/doctesting.py b/third_party/python/pystache/pystache/tests/doctesting.py deleted file mode 100644 index 1102b78e73a2..000000000000 --- a/third_party/python/pystache/pystache/tests/doctesting.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding: utf-8 - -""" -Exposes a get_doctests() function for the project's test harness. - -""" - -import doctest -import os -import pkgutil -import sys -import traceback - -if sys.version_info >= (3,): - # Then pull in modules needed for 2to3 conversion. The modules - # below are not necessarily available in older versions of Python. - from lib2to3.main import main as lib2to3main # new in Python 2.6? - from shutil import copyfile - -from pystache.tests.common import TEXT_DOCTEST_PATHS -from pystache.tests.common import get_module_names - - -# This module follows the guidance documented here: -# -# http://docs.python.org/library/doctest.html#unittest-api -# - -def get_doctests(text_file_dir): - """ - Return a list of TestSuite instances for all doctests in the project. - - Arguments: - - text_file_dir: the directory in which to search for all text files - (i.e. non-module files) containing doctests. - - """ - # Since module_relative is False in our calls to DocFileSuite below, - # paths should be OS-specific. 
See the following for more info-- - # - # http://docs.python.org/library/doctest.html#doctest.DocFileSuite - # - paths = [os.path.normpath(os.path.join(text_file_dir, path)) for path in TEXT_DOCTEST_PATHS] - - if sys.version_info >= (3,): - # Skip the README doctests in Python 3 for now because examples - # rendering to unicode do not give consistent results - # (e.g. 'foo' vs u'foo'). - # paths = _convert_paths(paths) - paths = [] - - suites = [] - - for path in paths: - suite = doctest.DocFileSuite(path, module_relative=False) - suites.append(suite) - - modules = get_module_names() - for module in modules: - suite = doctest.DocTestSuite(module) - suites.append(suite) - - return suites - - -def _convert_2to3(path): - """ - Convert the given file, and return the path to the converted files. - - """ - base, ext = os.path.splitext(path) - # For example, "README.temp2to3.rst". - new_path = "%s.temp2to3%s" % (base, ext) - - copyfile(path, new_path) - - args = ['--doctests_only', '--no-diffs', '--write', '--nobackups', new_path] - lib2to3main("lib2to3.fixes", args=args) - - return new_path - - -def _convert_paths(paths): - """ - Convert the given files, and return the paths to the converted files. - - """ - new_paths = [] - for path in paths: - new_path = _convert_2to3(path) - new_paths.append(new_path) - - return new_paths diff --git a/third_party/python/pystache/pystache/tests/examples/__init__.py b/third_party/python/pystache/pystache/tests/examples/__init__.py deleted file mode 100644 index a0d386a38c98..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -TODO: add a docstring. - -""" diff --git a/third_party/python/pystache/pystache/tests/examples/comments.mustache b/third_party/python/pystache/pystache/tests/examples/comments.mustache deleted file mode 100644 index 2a2a08b33a35..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/comments.mustache +++ /dev/null @@ -1 +0,0 @@ -

-<h1>{{title}}{{! just something interesting... #or not... }}</h1>
    \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/comments.py b/third_party/python/pystache/pystache/tests/examples/comments.py deleted file mode 100644 index 8d75f8835dd6..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/comments.py +++ /dev/null @@ -1,10 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class Comments(object): - - def title(self): - return "A Comedy of Errors" diff --git a/third_party/python/pystache/pystache/tests/examples/complex.mustache b/third_party/python/pystache/pystache/tests/examples/complex.mustache deleted file mode 100644 index 6de758ba177d..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/complex.mustache +++ /dev/null @@ -1,6 +0,0 @@ -

-<h1>{{ header }}</h1>
-{{#list}}<ul>
-{{#item}}{{# current }}<li><strong>{{name}}</strong></li>
-{{/ current }}{{#link}}<li><a href="{{url}}">{{name}}</a></li>
-{{/link}}{{/item}}</ul>
-{{/list}}{{#empty}}<p>The list is empty.</p>
    {{/empty}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/complex.py b/third_party/python/pystache/pystache/tests/examples/complex.py deleted file mode 100644 index c653db0cb7c1..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/complex.py +++ /dev/null @@ -1,26 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class Complex(object): - - def header(self): - return "Colors" - - def item(self): - items = [] - items.append({ 'name': 'red', 'current': True, 'url': '#Red' }) - items.append({ 'name': 'green', 'link': True, 'url': '#Green' }) - items.append({ 'name': 'blue', 'link': True, 'url': '#Blue' }) - return items - - def list(self): - return not self.empty() - - def empty(self): - return len(self.item()) == 0 - - def empty_list(self): - return []; diff --git a/third_party/python/pystache/pystache/tests/examples/delimiters.mustache b/third_party/python/pystache/pystache/tests/examples/delimiters.mustache deleted file mode 100644 index 92bea6d2059c..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/delimiters.mustache +++ /dev/null @@ -1,6 +0,0 @@ -{{=<% %>=}} -* <% first %> -<%=| |=%> -* | second | -|={{ }}=| -* {{ third }} diff --git a/third_party/python/pystache/pystache/tests/examples/delimiters.py b/third_party/python/pystache/pystache/tests/examples/delimiters.py deleted file mode 100644 index a31ec1b300e7..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/delimiters.py +++ /dev/null @@ -1,16 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class Delimiters(object): - - def first(self): - return "It worked the first time." - - def second(self): - return "And it worked the second time." - - def third(self): - return "Then, surprisingly, it worked the third time." diff --git a/third_party/python/pystache/pystache/tests/examples/double_section.mustache b/third_party/python/pystache/pystache/tests/examples/double_section.mustache deleted file mode 100644 index 61f1917ac6e3..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/double_section.mustache +++ /dev/null @@ -1,3 +0,0 @@ -{{#t}}* first{{/t}} -* {{two}} -{{#t}}* third{{/t}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/double_section.py b/third_party/python/pystache/pystache/tests/examples/double_section.py deleted file mode 100644 index c9736e413b18..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/double_section.py +++ /dev/null @@ -1,13 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class DoubleSection(object): - - def t(self): - return True - - def two(self): - return "second" diff --git a/third_party/python/pystache/pystache/tests/examples/escaped.mustache b/third_party/python/pystache/pystache/tests/examples/escaped.mustache deleted file mode 100644 index 8be4ccb62a2c..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/escaped.mustache +++ /dev/null @@ -1 +0,0 @@ -

-<h1>{{title}}</h1>
    \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/escaped.py b/third_party/python/pystache/pystache/tests/examples/escaped.py deleted file mode 100644 index 5d72dde3fdc8..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/escaped.py +++ /dev/null @@ -1,10 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class Escaped(object): - - def title(self): - return "Bear > Shark" diff --git a/third_party/python/pystache/pystache/tests/examples/inner_partial.mustache b/third_party/python/pystache/pystache/tests/examples/inner_partial.mustache deleted file mode 100644 index 2863764e4bd5..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/inner_partial.mustache +++ /dev/null @@ -1 +0,0 @@ -Again, {{title}}! \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/inner_partial.txt b/third_party/python/pystache/pystache/tests/examples/inner_partial.txt deleted file mode 100644 index 650c9598870e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/inner_partial.txt +++ /dev/null @@ -1 +0,0 @@ -## Again, {{title}}! ## \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/inverted.mustache b/third_party/python/pystache/pystache/tests/examples/inverted.mustache deleted file mode 100644 index fbea98d4303e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/inverted.mustache +++ /dev/null @@ -1 +0,0 @@ -{{^f}}one{{/f}}, {{ two }}, {{^f}}three{{/f}}{{^t}}, four!{{/t}}{{^empty_list}}, empty list{{/empty_list}}{{^populated_list}}, shouldn't see me{{/populated_list}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/inverted.py b/third_party/python/pystache/pystache/tests/examples/inverted.py deleted file mode 100644 index 12212b49a562..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/inverted.py +++ /dev/null @@ -1,33 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -from pystache import TemplateSpec - -class Inverted(object): - - def t(self): - return True - - def f(self): - return False - - def two(self): - return 'two' - - def empty_list(self): - return [] - - def populated_list(self): - return ['some_value'] - -class InvertedLists(Inverted, TemplateSpec): - template_name = 'inverted' - - def t(self): - return [0, 1, 2] - - def f(self): - return [] diff --git a/third_party/python/pystache/pystache/tests/examples/lambdas.mustache b/third_party/python/pystache/pystache/tests/examples/lambdas.mustache deleted file mode 100644 index 9dffca594abb..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/lambdas.mustache +++ /dev/null @@ -1 +0,0 @@ -{{#replace_foo_with_bar}}foo != bar. oh, it does!{{/replace_foo_with_bar}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/lambdas.py b/third_party/python/pystache/pystache/tests/examples/lambdas.py deleted file mode 100644 index 3bc08ff3cf15..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/lambdas.py +++ /dev/null @@ -1,38 +0,0 @@ - -""" -TODO: add a docstring. 
- -""" - -from pystache import TemplateSpec - -def rot(s, n=13): - r = "" - for c in s: - cc = c - if cc.isalpha(): - cc = cc.lower() - o = ord(cc) - ro = (o+n) % 122 - if ro == 0: ro = 122 - if ro < 97: ro += 96 - cc = chr(ro) - r = ''.join((r,cc)) - return r - -def replace(subject, this='foo', with_this='bar'): - return subject.replace(this, with_this) - - -# This class subclasses TemplateSpec because at least one unit test -# sets the template attribute. -class Lambdas(TemplateSpec): - - def replace_foo_with_bar(self, text=None): - return replace - - def rot13(self, text=None): - return rot - - def sort(self, text=None): - return lambda text: ''.join(sorted(text)) diff --git a/third_party/python/pystache/pystache/tests/examples/looping_partial.mustache b/third_party/python/pystache/pystache/tests/examples/looping_partial.mustache deleted file mode 100644 index 577f736e81c4..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/looping_partial.mustache +++ /dev/null @@ -1 +0,0 @@ -Looping partial {{item}}! \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/nested_context.mustache b/third_party/python/pystache/pystache/tests/examples/nested_context.mustache deleted file mode 100644 index ce570d693d9e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/nested_context.mustache +++ /dev/null @@ -1 +0,0 @@ -{{#foo}}{{thing1}} and {{thing2}} and {{outer_thing}}{{/foo}}{{^foo}}Not foo!{{/foo}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/nested_context.py b/third_party/python/pystache/pystache/tests/examples/nested_context.py deleted file mode 100644 index a2661b9c42b3..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/nested_context.py +++ /dev/null @@ -1,32 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -from pystache import TemplateSpec - -class NestedContext(TemplateSpec): - - def __init__(self, renderer): - self.renderer = renderer - - def _context_get(self, key): - return self.renderer.context.get(key) - - def outer_thing(self): - return "two" - - def foo(self): - return {'thing1': 'one', 'thing2': 'foo'} - - def derp(self): - return [{'inner': 'car'}] - - def herp(self): - return [{'outer': 'car'}] - - def nested_context_in_view(self): - if self._context_get('outer') == self._context_get('inner'): - return 'it works!' 
- return '' diff --git a/third_party/python/pystache/pystache/tests/examples/partial_in_partial.mustache b/third_party/python/pystache/pystache/tests/examples/partial_in_partial.mustache deleted file mode 100644 index c61ceb1d0075..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/partial_in_partial.mustache +++ /dev/null @@ -1 +0,0 @@ -{{>simple}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/partial_with_lambda.mustache b/third_party/python/pystache/pystache/tests/examples/partial_with_lambda.mustache deleted file mode 100644 index 2989f56c7b5e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/partial_with_lambda.mustache +++ /dev/null @@ -1 +0,0 @@ -{{#rot13}}abcdefghijklm{{/rot13}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/partial_with_partial_and_lambda.mustache b/third_party/python/pystache/pystache/tests/examples/partial_with_partial_and_lambda.mustache deleted file mode 100644 index 0729e10fb5b2..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/partial_with_partial_and_lambda.mustache +++ /dev/null @@ -1 +0,0 @@ -{{>partial_with_lambda}}{{#rot13}}abcdefghijklm{{/rot13}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/partials_with_lambdas.py b/third_party/python/pystache/pystache/tests/examples/partials_with_lambdas.py deleted file mode 100644 index 638aa3631321..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/partials_with_lambdas.py +++ /dev/null @@ -1,12 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -from pystache.tests.examples.lambdas import rot - -class PartialsWithLambdas(object): - - def rot(self): - return rot diff --git a/third_party/python/pystache/pystache/tests/examples/readme.py b/third_party/python/pystache/pystache/tests/examples/readme.py deleted file mode 100644 index 8dcee434a59a..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/readme.py +++ /dev/null @@ -1,9 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class SayHello(object): - def to(self): - return "Pizza" diff --git a/third_party/python/pystache/pystache/tests/examples/say_hello.mustache b/third_party/python/pystache/pystache/tests/examples/say_hello.mustache deleted file mode 100644 index 7d8dfea5adda..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/say_hello.mustache +++ /dev/null @@ -1 +0,0 @@ -Hello, {{to}}! \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/simple.mustache b/third_party/python/pystache/pystache/tests/examples/simple.mustache deleted file mode 100644 index 9214dabde71b..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/simple.mustache +++ /dev/null @@ -1 +0,0 @@ -Hi {{thing}}!{{blank}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/simple.py b/third_party/python/pystache/pystache/tests/examples/simple.py deleted file mode 100644 index ea82e9d00d5c..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/simple.py +++ /dev/null @@ -1,15 +0,0 @@ - -""" -TODO: add a docstring. 
- -""" - -from pystache import TemplateSpec - -class Simple(TemplateSpec): - - def thing(self): - return "pizza" - - def blank(self): - return '' diff --git a/third_party/python/pystache/pystache/tests/examples/tagless.mustache b/third_party/python/pystache/pystache/tests/examples/tagless.mustache deleted file mode 100644 index ad4dd3179ac6..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/tagless.mustache +++ /dev/null @@ -1 +0,0 @@ -No tags... \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/template_partial.mustache b/third_party/python/pystache/pystache/tests/examples/template_partial.mustache deleted file mode 100644 index 03f76cfa8254..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/template_partial.mustache +++ /dev/null @@ -1,2 +0,0 @@ -

-<h1>{{title}}</h1>
    -{{>inner_partial}} \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/template_partial.py b/third_party/python/pystache/pystache/tests/examples/template_partial.py deleted file mode 100644 index 1c4d1a012582..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/template_partial.py +++ /dev/null @@ -1,27 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -from pystache import TemplateSpec - -class TemplatePartial(TemplateSpec): - - def __init__(self, renderer): - self.renderer = renderer - - def _context_get(self, key): - return self.renderer.context.get(key) - - def title(self): - return "Welcome" - - def title_bars(self): - return '-' * len(self.title()) - - def looping(self): - return [{'item': 'one'}, {'item': 'two'}, {'item': 'three'}] - - def thing(self): - return self._context_get('prop') diff --git a/third_party/python/pystache/pystache/tests/examples/template_partial.txt b/third_party/python/pystache/pystache/tests/examples/template_partial.txt deleted file mode 100644 index d9b5f6efb227..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/template_partial.txt +++ /dev/null @@ -1,4 +0,0 @@ -{{title}} -{{title_bars}} - -{{>inner_partial}} diff --git a/third_party/python/pystache/pystache/tests/examples/unescaped.mustache b/third_party/python/pystache/pystache/tests/examples/unescaped.mustache deleted file mode 100644 index 9982708a721e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/unescaped.mustache +++ /dev/null @@ -1 +0,0 @@ -

-<h1>{{{title}}}</h1>
    \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/unescaped.py b/third_party/python/pystache/pystache/tests/examples/unescaped.py deleted file mode 100644 index 92889af4dabe..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/unescaped.py +++ /dev/null @@ -1,10 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -class Unescaped(object): - - def title(self): - return "Bear > Shark" diff --git a/third_party/python/pystache/pystache/tests/examples/unicode_input.mustache b/third_party/python/pystache/pystache/tests/examples/unicode_input.mustache deleted file mode 100644 index f654cd15a4c3..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/unicode_input.mustache +++ /dev/null @@ -1 +0,0 @@ -abcdé \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/unicode_input.py b/third_party/python/pystache/pystache/tests/examples/unicode_input.py deleted file mode 100644 index d0457575e73e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/unicode_input.py +++ /dev/null @@ -1,14 +0,0 @@ - -""" -TODO: add a docstring. - -""" - -from pystache import TemplateSpec - -class UnicodeInput(TemplateSpec): - - template_encoding = 'utf8' - - def age(self): - return 156 diff --git a/third_party/python/pystache/pystache/tests/examples/unicode_output.mustache b/third_party/python/pystache/pystache/tests/examples/unicode_output.mustache deleted file mode 100644 index 8495f561928e..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/unicode_output.mustache +++ /dev/null @@ -1 +0,0 @@ -

-<p>Name: {{name}}</p>
    \ No newline at end of file diff --git a/third_party/python/pystache/pystache/tests/examples/unicode_output.py b/third_party/python/pystache/pystache/tests/examples/unicode_output.py deleted file mode 100644 index da0e1d2e6ff8..000000000000 --- a/third_party/python/pystache/pystache/tests/examples/unicode_output.py +++ /dev/null @@ -1,11 +0,0 @@ -# encoding: utf-8 - -""" -TODO: add a docstring. - -""" - -class UnicodeOutput(object): - - def name(self): - return u'Henri Poincaré' diff --git a/third_party/python/pystache/pystache/tests/main.py b/third_party/python/pystache/pystache/tests/main.py deleted file mode 100644 index 8af6b2ee0cdb..000000000000 --- a/third_party/python/pystache/pystache/tests/main.py +++ /dev/null @@ -1,190 +0,0 @@ -# coding: utf-8 - -""" -Exposes a main() function that runs all tests in the project. - -This module is for our test console script. - -""" - -import os -import sys -import unittest -from unittest import TestCase, TestProgram - -import pystache -from pystache.tests.common import PACKAGE_DIR, PROJECT_DIR, UNITTEST_FILE_PREFIX -from pystache.tests.common import get_module_names, get_spec_test_dir -from pystache.tests.doctesting import get_doctests -from pystache.tests.spectesting import get_spec_tests - - -# If this command option is present, then the spec test and doctest directories -# will be inserted if not provided. -FROM_SOURCE_OPTION = "--from-source" - - -def make_extra_tests(text_doctest_dir, spec_test_dir): - tests = [] - - if text_doctest_dir is not None: - doctest_suites = get_doctests(text_doctest_dir) - tests.extend(doctest_suites) - - if spec_test_dir is not None: - spec_testcases = get_spec_tests(spec_test_dir) - tests.extend(spec_testcases) - - return unittest.TestSuite(tests) - - -def make_test_program_class(extra_tests): - """ - Return a subclass of unittest.TestProgram. - - """ - # The function unittest.main() is an alias for unittest.TestProgram's - # constructor. TestProgram's constructor does the following: - # - # 1. calls self.parseArgs(argv), - # 2. which in turn calls self.createTests(). - # 3. then the constructor calls self.runTests(). - # - # The createTests() method sets the self.test attribute by calling one - # of self.testLoader's "loadTests" methods. Each loadTest method returns - # a unittest.TestSuite instance. Thus, self.test is set to a TestSuite - # instance prior to calling runTests(). - class PystacheTestProgram(TestProgram): - - """ - Instantiating an instance of this class runs all tests. - - """ - - def createTests(self): - """ - Load tests and set self.test to a unittest.TestSuite instance - - Compare-- - - http://docs.python.org/library/unittest.html#unittest.TestSuite - - """ - super(PystacheTestProgram, self).createTests() - self.test.addTests(extra_tests) - - return PystacheTestProgram - - -# Do not include "test" in this function's name to avoid it getting -# picked up by nosetests. -def main(sys_argv): - """ - Run all tests in the project. - - Arguments: - - sys_argv: a reference to sys.argv. - - """ - # TODO: use logging module - print "pystache: running tests: argv: %s" % repr(sys_argv) - - should_source_exist = False - spec_test_dir = None - project_dir = None - - if len(sys_argv) > 1 and sys_argv[1] == FROM_SOURCE_OPTION: - # This usually means the test_pystache.py convenience script - # in the source directory was run. - should_source_exist = True - sys_argv.pop(1) - - try: - # TODO: use optparse command options instead. 
- project_dir = sys_argv[1] - sys_argv.pop(1) - except IndexError: - if should_source_exist: - project_dir = PROJECT_DIR - - try: - # TODO: use optparse command options instead. - spec_test_dir = sys_argv[1] - sys_argv.pop(1) - except IndexError: - if project_dir is not None: - # Then auto-detect the spec test directory. - _spec_test_dir = get_spec_test_dir(project_dir) - if not os.path.exists(_spec_test_dir): - # Then the user is probably using a downloaded sdist rather - # than a repository clone (since the sdist does not include - # the spec test directory). - print("pystache: skipping spec tests: spec test directory " - "not found") - else: - spec_test_dir = _spec_test_dir - - if len(sys_argv) <= 1 or sys_argv[-1].startswith("-"): - # Then no explicit module or test names were provided, so - # auto-detect all unit tests. - module_names = _discover_test_modules(PACKAGE_DIR) - sys_argv.extend(module_names) - if project_dir is not None: - # Add the current module for unit tests contained here (e.g. - # to include SetupTests). - sys_argv.append(__name__) - - SetupTests.project_dir = project_dir - - extra_tests = make_extra_tests(project_dir, spec_test_dir) - test_program_class = make_test_program_class(extra_tests) - - # We pass None for the module because we do not want the unittest - # module to resolve module names relative to a given module. - # (This would require importing all of the unittest modules from - # this module.) See the loadTestsFromName() method of the - # unittest.TestLoader class for more details on this parameter. - test_program_class(argv=sys_argv, module=None) - # No need to return since unitttest.main() exits. - - -def _discover_test_modules(package_dir): - """ - Discover and return a sorted list of the names of unit-test modules. - - """ - def is_unittest_module(path): - file_name = os.path.basename(path) - return file_name.startswith(UNITTEST_FILE_PREFIX) - - names = get_module_names(package_dir=package_dir, should_include=is_unittest_module) - - # This is a sanity check to ensure that the unit-test discovery - # methods are working. - if len(names) < 1: - raise Exception("No unit-test modules found--\n in %s" % package_dir) - - return names - - -class SetupTests(TestCase): - - """Tests about setup.py.""" - - project_dir = None - - def test_version(self): - """ - Test that setup.py's version matches the package's version. - - """ - original_path = list(sys.path) - - sys.path.insert(0, self.project_dir) - - try: - from setup import VERSION - self.assertEqual(VERSION, pystache.__version__) - finally: - sys.path = original_path diff --git a/third_party/python/pystache/pystache/tests/spectesting.py b/third_party/python/pystache/pystache/tests/spectesting.py deleted file mode 100644 index ec8a08df928a..000000000000 --- a/third_party/python/pystache/pystache/tests/spectesting.py +++ /dev/null @@ -1,285 +0,0 @@ -# coding: utf-8 - -""" -Exposes a get_spec_tests() function for the project's test harness. - -Creates a unittest.TestCase for the tests defined in the mustache spec. - -""" - -# TODO: this module can be cleaned up somewhat. -# TODO: move all of this code to pystache/tests/spectesting.py and -# have it expose a get_spec_tests(spec_test_dir) function. - -FILE_ENCODING = 'utf-8' # the encoding of the spec test files. - -yaml = None - -try: - # We try yaml first since it is more convenient when adding and modifying - # test cases by hand (since the YAML is human-readable and is the master - # from which the JSON format is generated). 
- import yaml -except ImportError: - try: - import json - except: - # The module json is not available prior to Python 2.6, whereas - # simplejson is. The simplejson package dropped support for Python 2.4 - # in simplejson v2.1.0, so Python 2.4 requires a simplejson install - # older than the most recent version. - try: - import simplejson as json - except ImportError: - # Raise an error with a type different from ImportError as a hack around - # this issue: - # http://bugs.python.org/issue7559 - from sys import exc_info - ex_type, ex_value, tb = exc_info() - new_ex = Exception("%s: %s" % (ex_type.__name__, ex_value)) - raise new_ex.__class__, new_ex, tb - file_extension = 'json' - parser = json -else: - file_extension = 'yml' - parser = yaml - - -import codecs -import glob -import os.path -import unittest - -import pystache -from pystache import common -from pystache.renderer import Renderer -from pystache.tests.common import AssertStringMixin - - -def get_spec_tests(spec_test_dir): - """ - Return a list of unittest.TestCase instances. - - """ - # TODO: use logging module instead. - print "pystache: spec tests: using %s" % _get_parser_info() - - cases = [] - - # Make this absolute for easier diagnosis in case of error. - spec_test_dir = os.path.abspath(spec_test_dir) - spec_paths = glob.glob(os.path.join(spec_test_dir, '*.%s' % file_extension)) - - for path in spec_paths: - new_cases = _read_spec_tests(path) - cases.extend(new_cases) - - # Store this as a value so that CheckSpecTestsFound is not checking - # a reference to cases that contains itself. - spec_test_count = len(cases) - - # This test case lets us alert the user that spec tests are missing. - class CheckSpecTestsFound(unittest.TestCase): - - def runTest(self): - if spec_test_count > 0: - return - raise Exception("Spec tests not found--\n in %s\n" - " Consult the README file on how to add the Mustache spec tests." % repr(spec_test_dir)) - - case = CheckSpecTestsFound() - cases.append(case) - - return cases - - -def _get_parser_info(): - return "%s (version %s)" % (parser.__name__, parser.__version__) - - -def _read_spec_tests(path): - """ - Return a list of unittest.TestCase instances. - - """ - b = common.read(path) - u = unicode(b, encoding=FILE_ENCODING) - spec_data = parse(u) - tests = spec_data['tests'] - - cases = [] - for data in tests: - case = _deserialize_spec_test(data, path) - cases.append(case) - - return cases - - -# TODO: simplify the implementation of this function. -def _convert_children(node): - """ - Recursively convert to functions all "code strings" below the node. - - This function is needed only for the json format. - - """ - if not isinstance(node, (list, dict)): - # Then there is nothing to iterate over and recurse. - return - - if isinstance(node, list): - for child in node: - _convert_children(child) - return - # Otherwise, node is a dict, so attempt the conversion. - - for key in node.keys(): - val = node[key] - - if not isinstance(val, dict) or val.get('__tag__') != 'code': - _convert_children(val) - continue - # Otherwise, we are at a "leaf" node. - - val = eval(val['python']) - node[key] = val - continue - - -def _deserialize_spec_test(data, file_path): - """ - Return a unittest.TestCase instance representing a spec test. - - Arguments: - - data: the dictionary of attributes for a single test. - - """ - context = data['data'] - description = data['desc'] - # PyYAML seems to leave ASCII strings as byte strings. - expected = unicode(data['expected']) - # TODO: switch to using dict.get(). 
- partials = data.has_key('partials') and data['partials'] or {} - template = data['template'] - test_name = data['name'] - - _convert_children(context) - - test_case = _make_spec_test(expected, template, context, partials, description, test_name, file_path) - - return test_case - - -def _make_spec_test(expected, template, context, partials, description, test_name, file_path): - """ - Return a unittest.TestCase instance representing a spec test. - - """ - file_name = os.path.basename(file_path) - test_method_name = "Mustache spec (%s): %s" % (file_name, repr(test_name)) - - # We subclass SpecTestBase in order to control the test method name (for - # the purposes of improved reporting). - class SpecTest(SpecTestBase): - pass - - def run_test(self): - self._runTest() - - # TODO: should we restore this logic somewhere? - # If we don't convert unicode to str, we get the following error: - # "TypeError: __name__ must be set to a string object" - # test.__name__ = str(name) - setattr(SpecTest, test_method_name, run_test) - case = SpecTest(test_method_name) - - case._context = context - case._description = description - case._expected = expected - case._file_path = file_path - case._partials = partials - case._template = template - case._test_name = test_name - - return case - - -def parse(u): - """ - Parse the contents of a spec test file, and return a dict. - - Arguments: - - u: a unicode string. - - """ - # TODO: find a cleaner mechanism for choosing between the two. - if yaml is None: - # Then use json. - - # The only way to get the simplejson module to return unicode strings - # is to pass it unicode. See, for example-- - # - # http://code.google.com/p/simplejson/issues/detail?id=40 - # - # and the documentation of simplejson.loads(): - # - # "If s is a str then decoded JSON strings that contain only ASCII - # characters may be parsed as str for performance and memory reasons. - # If your code expects only unicode the appropriate solution is - # decode s to unicode prior to calling loads." - # - return json.loads(u) - # Otherwise, yaml. - - def code_constructor(loader, node): - value = loader.construct_mapping(node) - return eval(value['python'], {}) - - yaml.add_constructor(u'!code', code_constructor) - return yaml.load(u) - - -class SpecTestBase(unittest.TestCase, AssertStringMixin): - - def _runTest(self): - context = self._context - description = self._description - expected = self._expected - file_path = self._file_path - partials = self._partials - template = self._template - test_name = self._test_name - - renderer = Renderer(partials=partials) - actual = renderer.render(template, context) - - # We need to escape the strings that occur in our format string because - # they can contain % symbols, for example (in delimiters.yml)-- - # - # "template: '{{=<% %>=}}(<%text%>)'" - # - def escape(s): - return s.replace("%", "%%") - - parser_info = _get_parser_info() - subs = [repr(test_name), description, os.path.abspath(file_path), - template, repr(context), parser_info] - subs = tuple([escape(sub) for sub in subs]) - # We include the parsing module version info to help with troubleshooting - # yaml/json/simplejson issues. 
- message = """%s: %s - - File: %s - - Template: \"""%s\""" - - Context: %s - - %%s - - [using %s] - """ % subs - - self.assertString(actual, expected, format=message) diff --git a/third_party/python/pystache/pystache/tests/test___init__.py b/third_party/python/pystache/pystache/tests/test___init__.py deleted file mode 100644 index eae42c1a4b2a..000000000000 --- a/third_party/python/pystache/pystache/tests/test___init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# coding: utf-8 - -""" -Tests of __init__.py. - -""" - -# Calling "import *" is allowed only at the module level. -GLOBALS_INITIAL = globals().keys() -from pystache import * -GLOBALS_PYSTACHE_IMPORTED = globals().keys() - -import unittest - -import pystache - - -class InitTests(unittest.TestCase): - - def test___all__(self): - """ - Test that "from pystache import *" works as expected. - - """ - actual = set(GLOBALS_PYSTACHE_IMPORTED) - set(GLOBALS_INITIAL) - expected = set(['parse', 'render', 'Renderer', 'TemplateSpec', 'GLOBALS_INITIAL']) - - self.assertEqual(actual, expected) - - def test_version_defined(self): - """ - Test that pystache.__version__ is set. - - """ - actual_version = pystache.__version__ - self.assertTrue(actual_version) diff --git a/third_party/python/pystache/pystache/tests/test_commands.py b/third_party/python/pystache/pystache/tests/test_commands.py deleted file mode 100644 index 2529d2578503..000000000000 --- a/third_party/python/pystache/pystache/tests/test_commands.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -""" -Unit tests of commands.py. - -""" - -import sys -import unittest - -from pystache.commands.render import main - - -ORIGINAL_STDOUT = sys.stdout - - -class MockStdout(object): - - def __init__(self): - self.output = "" - - def write(self, str): - self.output += str - - -class CommandsTestCase(unittest.TestCase): - - def setUp(self): - sys.stdout = MockStdout() - - def callScript(self, template, context): - argv = ['pystache', template, context] - main(argv) - return sys.stdout.output - - def testMainSimple(self): - """ - Test a simple command-line case. - - """ - actual = self.callScript("Hi {{thing}}", '{"thing": "world"}') - self.assertEqual(actual, u"Hi world\n") - - def tearDown(self): - sys.stdout = ORIGINAL_STDOUT diff --git a/third_party/python/pystache/pystache/tests/test_context.py b/third_party/python/pystache/pystache/tests/test_context.py deleted file mode 100644 index 238e4b004dfc..000000000000 --- a/third_party/python/pystache/pystache/tests/test_context.py +++ /dev/null @@ -1,499 +0,0 @@ -# coding: utf-8 - -""" -Unit tests of context.py. - -""" - -from datetime import datetime -import unittest - -from pystache.context import _NOT_FOUND, _get_value, KeyNotFoundError, ContextStack -from pystache.tests.common import AssertIsMixin, AssertStringMixin, AssertExceptionMixin, Attachable - -class SimpleObject(object): - - """A sample class that does not define __getitem__().""" - - def __init__(self): - self.foo = "bar" - - def foo_callable(self): - return "called..." - - -class DictLike(object): - - """A sample class that implements __getitem__() and __contains__().""" - - def __init__(self): - self._dict = {'foo': 'bar'} - self.fuzz = 'buzz' - - def __contains__(self, key): - return key in self._dict - - def __getitem__(self, key): - return self._dict[key] - - -class GetValueTestCase(unittest.TestCase, AssertIsMixin): - - """Test context._get_value().""" - - def assertNotFound(self, item, key): - """ - Assert that a call to _get_value() returns _NOT_FOUND. 
- - """ - self.assertIs(_get_value(item, key), _NOT_FOUND) - - ### Case: the item is a dictionary. - - def test_dictionary__key_present(self): - """ - Test getting a key from a dictionary. - - """ - item = {"foo": "bar"} - self.assertEqual(_get_value(item, "foo"), "bar") - - def test_dictionary__callable_not_called(self): - """ - Test that callable values are returned as-is (and in particular not called). - - """ - def foo_callable(self): - return "bar" - - item = {"foo": foo_callable} - self.assertNotEqual(_get_value(item, "foo"), "bar") - self.assertTrue(_get_value(item, "foo") is foo_callable) - - def test_dictionary__key_missing(self): - """ - Test getting a missing key from a dictionary. - - """ - item = {} - self.assertNotFound(item, "missing") - - def test_dictionary__attributes_not_checked(self): - """ - Test that dictionary attributes are not checked. - - """ - item = {1: 2, 3: 4} - # I was not able to find a "public" attribute of dict that is - # the same across Python 2/3. - attr_name = "__len__" - self.assertEqual(getattr(item, attr_name)(), 2) - self.assertNotFound(item, attr_name) - - def test_dictionary__dict_subclass(self): - """ - Test that subclasses of dict are treated as dictionaries. - - """ - class DictSubclass(dict): pass - - item = DictSubclass() - item["foo"] = "bar" - - self.assertEqual(_get_value(item, "foo"), "bar") - - ### Case: the item is an object. - - def test_object__attribute_present(self): - """ - Test getting an attribute from an object. - - """ - item = SimpleObject() - self.assertEqual(_get_value(item, "foo"), "bar") - - def test_object__attribute_missing(self): - """ - Test getting a missing attribute from an object. - - """ - item = SimpleObject() - self.assertNotFound(item, "missing") - - def test_object__attribute_is_callable(self): - """ - Test getting a callable attribute from an object. - - """ - item = SimpleObject() - self.assertEqual(_get_value(item, "foo_callable"), "called...") - - def test_object__non_built_in_type(self): - """ - Test getting an attribute from an instance of a type that isn't built-in. - - """ - item = datetime(2012, 1, 2) - self.assertEqual(_get_value(item, "day"), 2) - - def test_object__dict_like(self): - """ - Test getting a key from a dict-like object (an object that implements '__getitem__'). - - """ - item = DictLike() - self.assertEqual(item["foo"], "bar") - self.assertNotFound(item, "foo") - - def test_object__property__raising_exception(self): - """ - Test getting a property that raises an exception. - - """ - class Foo(object): - - @property - def bar(self): - return 1 - - @property - def baz(self): - raise ValueError("test") - - foo = Foo() - self.assertEqual(_get_value(foo, 'bar'), 1) - self.assertNotFound(foo, 'missing') - self.assertRaises(ValueError, _get_value, foo, 'baz') - - ### Case: the item is an instance of a built-in type. - - def test_built_in_type__integer(self): - """ - Test getting from an integer. - - """ - class MyInt(int): pass - - cust_int = MyInt(10) - pure_int = 10 - - # We have to use a built-in method like __neg__ because "public" - # attributes like "real" were not added to Python until Python 2.6, - # when the numeric type hierarchy was added: - # - # http://docs.python.org/library/numbers.html - # - self.assertEqual(cust_int.__neg__(), -10) - self.assertEqual(pure_int.__neg__(), -10) - - self.assertEqual(_get_value(cust_int, '__neg__'), -10) - self.assertNotFound(pure_int, '__neg__') - - def test_built_in_type__string(self): - """ - Test getting from a string. 
- - """ - class MyStr(str): pass - - item1 = MyStr('abc') - item2 = 'abc' - - self.assertEqual(item1.upper(), 'ABC') - self.assertEqual(item2.upper(), 'ABC') - - self.assertEqual(_get_value(item1, 'upper'), 'ABC') - self.assertNotFound(item2, 'upper') - - def test_built_in_type__list(self): - """ - Test getting from a list. - - """ - class MyList(list): pass - - item1 = MyList([1, 2, 3]) - item2 = [1, 2, 3] - - self.assertEqual(item1.pop(), 3) - self.assertEqual(item2.pop(), 3) - - self.assertEqual(_get_value(item1, 'pop'), 2) - self.assertNotFound(item2, 'pop') - - -class ContextStackTestCase(unittest.TestCase, AssertIsMixin, AssertStringMixin, - AssertExceptionMixin): - - """ - Test the ContextStack class. - - """ - - def test_init__no_elements(self): - """ - Check that passing nothing to __init__() raises no exception. - - """ - context = ContextStack() - - def test_init__many_elements(self): - """ - Check that passing more than two items to __init__() raises no exception. - - """ - context = ContextStack({}, {}, {}) - - def test__repr(self): - context = ContextStack() - self.assertEqual(repr(context), 'ContextStack()') - - context = ContextStack({'foo': 'bar'}) - self.assertEqual(repr(context), "ContextStack({'foo': 'bar'},)") - - context = ContextStack({'foo': 'bar'}, {'abc': 123}) - self.assertEqual(repr(context), "ContextStack({'foo': 'bar'}, {'abc': 123})") - - def test__str(self): - context = ContextStack() - self.assertEqual(str(context), 'ContextStack()') - - context = ContextStack({'foo': 'bar'}) - self.assertEqual(str(context), "ContextStack({'foo': 'bar'},)") - - context = ContextStack({'foo': 'bar'}, {'abc': 123}) - self.assertEqual(str(context), "ContextStack({'foo': 'bar'}, {'abc': 123})") - - ## Test the static create() method. - - def test_create__dictionary(self): - """ - Test passing a dictionary. - - """ - context = ContextStack.create({'foo': 'bar'}) - self.assertEqual(context.get('foo'), 'bar') - - def test_create__none(self): - """ - Test passing None. - - """ - context = ContextStack.create({'foo': 'bar'}, None) - self.assertEqual(context.get('foo'), 'bar') - - def test_create__object(self): - """ - Test passing an object. - - """ - class Foo(object): - foo = 'bar' - context = ContextStack.create(Foo()) - self.assertEqual(context.get('foo'), 'bar') - - def test_create__context(self): - """ - Test passing a ContextStack instance. - - """ - obj = ContextStack({'foo': 'bar'}) - context = ContextStack.create(obj) - self.assertEqual(context.get('foo'), 'bar') - - def test_create__kwarg(self): - """ - Test passing a keyword argument. - - """ - context = ContextStack.create(foo='bar') - self.assertEqual(context.get('foo'), 'bar') - - def test_create__precedence_positional(self): - """ - Test precedence of positional arguments. - - """ - context = ContextStack.create({'foo': 'bar'}, {'foo': 'buzz'}) - self.assertEqual(context.get('foo'), 'buzz') - - def test_create__precedence_keyword(self): - """ - Test precedence of keyword arguments. - - """ - context = ContextStack.create({'foo': 'bar'}, foo='buzz') - self.assertEqual(context.get('foo'), 'buzz') - - ## Test the get() method. - - def test_get__single_dot(self): - """ - Test getting a single dot ("."). - - """ - context = ContextStack("a", "b") - self.assertEqual(context.get("."), "b") - - def test_get__single_dot__missing(self): - """ - Test getting a single dot (".") with an empty context stack. - - """ - context = ContextStack() - self.assertException(KeyNotFoundError, "Key '.' 
not found: empty context stack", context.get, ".") - - def test_get__key_present(self): - """ - Test getting a key. - - """ - context = ContextStack({"foo": "bar"}) - self.assertEqual(context.get("foo"), "bar") - - def test_get__key_missing(self): - """ - Test getting a missing key. - - """ - context = ContextStack() - self.assertException(KeyNotFoundError, "Key 'foo' not found: first part", context.get, "foo") - - def test_get__precedence(self): - """ - Test that get() respects the order of precedence (later items first). - - """ - context = ContextStack({"foo": "bar"}, {"foo": "buzz"}) - self.assertEqual(context.get("foo"), "buzz") - - def test_get__fallback(self): - """ - Check that first-added stack items are queried on context misses. - - """ - context = ContextStack({"fuzz": "buzz"}, {"foo": "bar"}) - self.assertEqual(context.get("fuzz"), "buzz") - - def test_push(self): - """ - Test push(). - - """ - key = "foo" - context = ContextStack({key: "bar"}) - self.assertEqual(context.get(key), "bar") - - context.push({key: "buzz"}) - self.assertEqual(context.get(key), "buzz") - - def test_pop(self): - """ - Test pop(). - - """ - key = "foo" - context = ContextStack({key: "bar"}, {key: "buzz"}) - self.assertEqual(context.get(key), "buzz") - - item = context.pop() - self.assertEqual(item, {"foo": "buzz"}) - self.assertEqual(context.get(key), "bar") - - def test_top(self): - key = "foo" - context = ContextStack({key: "bar"}, {key: "buzz"}) - self.assertEqual(context.get(key), "buzz") - - top = context.top() - self.assertEqual(top, {"foo": "buzz"}) - # Make sure calling top() didn't remove the item from the stack. - self.assertEqual(context.get(key), "buzz") - - def test_copy(self): - key = "foo" - original = ContextStack({key: "bar"}, {key: "buzz"}) - self.assertEqual(original.get(key), "buzz") - - new = original.copy() - # Confirm that the copy behaves the same. - self.assertEqual(new.get(key), "buzz") - # Change the copy, and confirm it is changed. - new.pop() - self.assertEqual(new.get(key), "bar") - # Confirm the original is unchanged. - self.assertEqual(original.get(key), "buzz") - - def test_dot_notation__dict(self): - name = "foo.bar" - stack = ContextStack({"foo": {"bar": "baz"}}) - self.assertEqual(stack.get(name), "baz") - - # Works all the way down - name = "a.b.c.d.e.f.g" - stack = ContextStack({"a": {"b": {"c": {"d": {"e": {"f": {"g": "w00t!"}}}}}}}) - self.assertEqual(stack.get(name), "w00t!") - - def test_dot_notation__user_object(self): - name = "foo.bar" - stack = ContextStack({"foo": Attachable(bar="baz")}) - self.assertEqual(stack.get(name), "baz") - - # Works on multiple levels, too - name = "a.b.c.d.e.f.g" - A = Attachable - stack = ContextStack({"a": A(b=A(c=A(d=A(e=A(f=A(g="w00t!"))))))}) - self.assertEqual(stack.get(name), "w00t!") - - def test_dot_notation__mixed_dict_and_obj(self): - name = "foo.bar.baz.bak" - stack = ContextStack({"foo": Attachable(bar={"baz": Attachable(bak=42)})}) - self.assertEqual(stack.get(name), 42) - - def test_dot_notation__missing_attr_or_key(self): - name = "foo.bar.baz.bak" - stack = ContextStack({"foo": {"bar": {}}}) - self.assertException(KeyNotFoundError, "Key 'foo.bar.baz.bak' not found: missing 'baz'", stack.get, name) - - stack = ContextStack({"foo": Attachable(bar=Attachable())}) - self.assertException(KeyNotFoundError, "Key 'foo.bar.baz.bak' not found: missing 'baz'", stack.get, name) - - def test_dot_notation__missing_part_terminates_search(self): - """ - Test that dotted name resolution terminates on a later part not found. 
- - Check that if a later dotted name part is not found in the result from - the former resolution, then name resolution terminates rather than - starting the search over with the next element of the context stack. - From the spec (interpolation section)-- - - 5) If any name parts were retained in step 1, each should be resolved - against a context stack containing only the result from the former - resolution. If any part fails resolution, the result should be considered - falsey, and should interpolate as the empty string. - - This test case is equivalent to the test case in the following pull - request: - - https://github.com/mustache/spec/pull/48 - - """ - stack = ContextStack({'a': {'b': 'A.B'}}, {'a': 'A'}) - self.assertEqual(stack.get('a'), 'A') - self.assertException(KeyNotFoundError, "Key 'a.b' not found: missing 'b'", stack.get, "a.b") - stack.pop() - self.assertEqual(stack.get('a.b'), 'A.B') - - def test_dot_notation__autocall(self): - name = "foo.bar.baz" - - # When any element in the path is callable, it should be automatically invoked - stack = ContextStack({"foo": Attachable(bar=Attachable(baz=lambda: "Called!"))}) - self.assertEqual(stack.get(name), "Called!") - - class Foo(object): - def bar(self): - return Attachable(baz='Baz') - - stack = ContextStack({"foo": Foo()}) - self.assertEqual(stack.get(name), "Baz") diff --git a/third_party/python/pystache/pystache/tests/test_defaults.py b/third_party/python/pystache/pystache/tests/test_defaults.py deleted file mode 100644 index c78ea7c86062..000000000000 --- a/third_party/python/pystache/pystache/tests/test_defaults.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding: utf-8 - -""" -Unit tests for defaults.py. - -""" - -import unittest - -import pystache - -from pystache.tests.common import AssertStringMixin - - -# TODO: make sure each default has at least one test. -class DefaultsConfigurableTestCase(unittest.TestCase, AssertStringMixin): - - """Tests that the user can change the defaults at runtime.""" - - # TODO: switch to using a context manager after 2.4 is deprecated. 
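For reference, the ContextStack behaviour pinned down by the tests above can be restated in a few lines. This is a minimal sketch, not part of the vendored files, assuming the pystache.context.ContextStack API that the tests themselves import; the values mirror the assertions above.

    from pystache.context import ContextStack

    # Later stack items take precedence over earlier ones (test_get__precedence).
    stack = ContextStack({'foo': 'bar'}, {'foo': 'buzz'})
    assert stack.get('foo') == 'buzz'

    # Dotted names resolve through nested dicts and object attributes, and
    # resolution does not restart with other stack items once a later part
    # is missing (test_dot_notation__missing_part_terminates_search).
    stack = ContextStack({'foo': {'bar': 'baz'}})
    assert stack.get('foo.bar') == 'baz'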
- def setUp(self): - """Save the defaults.""" - defaults = [ - 'DECODE_ERRORS', 'DELIMITERS', - 'FILE_ENCODING', 'MISSING_TAGS', - 'SEARCH_DIRS', 'STRING_ENCODING', - 'TAG_ESCAPE', 'TEMPLATE_EXTENSION' - ] - self.saved = {} - for e in defaults: - self.saved[e] = getattr(pystache.defaults, e) - - def tearDown(self): - for key, value in self.saved.items(): - setattr(pystache.defaults, key, value) - - def test_tag_escape(self): - """Test that changes to defaults.TAG_ESCAPE take effect.""" - template = u"{{foo}}" - context = {'foo': '<'} - actual = pystache.render(template, context) - self.assertString(actual, u"<") - - pystache.defaults.TAG_ESCAPE = lambda u: u - actual = pystache.render(template, context) - self.assertString(actual, u"<") - - def test_delimiters(self): - """Test that changes to defaults.DELIMITERS take effect.""" - template = u"[[foo]]{{foo}}" - context = {'foo': 'FOO'} - actual = pystache.render(template, context) - self.assertString(actual, u"[[foo]]FOO") - - pystache.defaults.DELIMITERS = ('[[', ']]') - actual = pystache.render(template, context) - self.assertString(actual, u"FOO{{foo}}") - - def test_missing_tags(self): - """Test that changes to defaults.MISSING_TAGS take effect.""" - template = u"{{foo}}" - context = {} - actual = pystache.render(template, context) - self.assertString(actual, u"") - - pystache.defaults.MISSING_TAGS = 'strict' - self.assertRaises(pystache.context.KeyNotFoundError, - pystache.render, template, context) diff --git a/third_party/python/pystache/pystache/tests/test_examples.py b/third_party/python/pystache/pystache/tests/test_examples.py deleted file mode 100644 index 5c9f74dae68b..000000000000 --- a/third_party/python/pystache/pystache/tests/test_examples.py +++ /dev/null @@ -1,106 +0,0 @@ -# encoding: utf-8 - -""" -TODO: add a docstring. - -""" - -import unittest - -from examples.comments import Comments -from examples.double_section import DoubleSection -from examples.escaped import Escaped -from examples.unescaped import Unescaped -from examples.template_partial import TemplatePartial -from examples.delimiters import Delimiters -from examples.unicode_output import UnicodeOutput -from examples.unicode_input import UnicodeInput -from examples.nested_context import NestedContext -from pystache import Renderer -from pystache.tests.common import EXAMPLES_DIR -from pystache.tests.common import AssertStringMixin - - -class TestView(unittest.TestCase, AssertStringMixin): - - def _assert(self, obj, expected): - renderer = Renderer() - actual = renderer.render(obj) - self.assertString(actual, expected) - - def test_comments(self): - self._assert(Comments(), u"
<h1>A Comedy of Errors</h1>") - - def test_double_section(self): - self._assert(DoubleSection(), u"* first\n* second\n* third") - - def test_unicode_output(self): - renderer = Renderer() - actual = renderer.render(UnicodeOutput()) - self.assertString(actual, u'<p>Name: Henri Poincaré</p>') - - def test_unicode_input(self): - renderer = Renderer() - actual = renderer.render(UnicodeInput()) - self.assertString(actual, u'abcdé') - - def test_escaping(self): - self._assert(Escaped(), u"<h1>Bear &gt; Shark</h1>") - - def test_literal(self): - renderer = Renderer() - actual = renderer.render(Unescaped()) - self.assertString(actual, u"<h1>Bear > Shark</h1>") - - def test_template_partial(self): - renderer = Renderer(search_dirs=EXAMPLES_DIR) - actual = renderer.render(TemplatePartial(renderer=renderer)) - - self.assertString(actual, u"""<h1>Welcome</h1>
    -Again, Welcome!""") - - def test_template_partial_extension(self): - renderer = Renderer(search_dirs=EXAMPLES_DIR, file_extension='txt') - - view = TemplatePartial(renderer=renderer) - - actual = renderer.render(view) - self.assertString(actual, u"""Welcome -------- - -## Again, Welcome! ##""") - - def test_delimiters(self): - renderer = Renderer() - actual = renderer.render(Delimiters()) - self.assertString(actual, u"""\ -* It worked the first time. -* And it worked the second time. -* Then, surprisingly, it worked the third time. -""") - - def test_nested_context(self): - renderer = Renderer() - actual = renderer.render(NestedContext(renderer)) - self.assertString(actual, u"one and foo and two") - - def test_nested_context_is_available_in_view(self): - renderer = Renderer() - - view = NestedContext(renderer) - view.template = '{{#herp}}{{#derp}}{{nested_context_in_view}}{{/derp}}{{/herp}}' - - actual = renderer.render(view) - self.assertString(actual, u'it works!') - - def test_partial_in_partial_has_access_to_grand_parent_context(self): - renderer = Renderer(search_dirs=EXAMPLES_DIR) - - view = TemplatePartial(renderer=renderer) - view.template = '''{{>partial_in_partial}}''' - - actual = renderer.render(view, {'prop': 'derp'}) - self.assertEqual(actual, 'Hi derp!') - -if __name__ == '__main__': - unittest.main() diff --git a/third_party/python/pystache/pystache/tests/test_loader.py b/third_party/python/pystache/pystache/tests/test_loader.py deleted file mode 100644 index f2c21874727c..000000000000 --- a/third_party/python/pystache/pystache/tests/test_loader.py +++ /dev/null @@ -1,209 +0,0 @@ -# encoding: utf-8 - -""" -Unit tests of loader.py. - -""" - -import os -import sys -import unittest - -from pystache.tests.common import AssertStringMixin, DATA_DIR, SetupDefaults -from pystache import defaults -from pystache.loader import Loader - - -# We use the same directory as the locator tests for now. -LOADER_DATA_DIR = os.path.join(DATA_DIR, 'locator') - - -class LoaderTests(unittest.TestCase, AssertStringMixin, SetupDefaults): - - def setUp(self): - self.setup_defaults() - - def tearDown(self): - self.teardown_defaults() - - def test_init__extension(self): - loader = Loader(extension='foo') - self.assertEqual(loader.extension, 'foo') - - def test_init__extension__default(self): - # Test the default value. 
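The defaults tests above depend on pystache reading its module-level settings at render time, so a test (or an application) can override and then restore them. A small sketch of that pattern, reusing the MISSING_TAGS case asserted above; the save/restore mirrors what setUp()/tearDown() do.

    import pystache
    from pystache import defaults

    # With the default MISSING_TAGS ('ignore'), unresolved tags render as ''.
    assert pystache.render(u'{{foo}}', {}) == u''

    # In 'strict' mode the same render raises KeyNotFoundError.
    saved = defaults.MISSING_TAGS
    defaults.MISSING_TAGS = 'strict'
    try:
        pystache.render(u'{{foo}}', {})
    except pystache.context.KeyNotFoundError:
        pass
    finally:
        defaults.MISSING_TAGS = saved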
- loader = Loader() - self.assertEqual(loader.extension, 'mustache') - - def test_init__file_encoding(self): - loader = Loader(file_encoding='bar') - self.assertEqual(loader.file_encoding, 'bar') - - def test_init__file_encoding__default(self): - file_encoding = defaults.FILE_ENCODING - try: - defaults.FILE_ENCODING = 'foo' - loader = Loader() - self.assertEqual(loader.file_encoding, 'foo') - finally: - defaults.FILE_ENCODING = file_encoding - - def test_init__to_unicode(self): - to_unicode = lambda x: x - loader = Loader(to_unicode=to_unicode) - self.assertEqual(loader.to_unicode, to_unicode) - - def test_init__to_unicode__default(self): - loader = Loader() - self.assertRaises(TypeError, loader.to_unicode, u"abc") - - decode_errors = defaults.DECODE_ERRORS - string_encoding = defaults.STRING_ENCODING - - nonascii = u'abcdé'.encode('utf-8') - - loader = Loader() - self.assertRaises(UnicodeDecodeError, loader.to_unicode, nonascii) - - defaults.DECODE_ERRORS = 'ignore' - loader = Loader() - self.assertString(loader.to_unicode(nonascii), u'abcd') - - defaults.STRING_ENCODING = 'utf-8' - loader = Loader() - self.assertString(loader.to_unicode(nonascii), u'abcdé') - - - def _get_path(self, filename): - return os.path.join(DATA_DIR, filename) - - def test_unicode__basic__input_str(self): - """ - Test unicode(): default arguments with str input. - - """ - loader = Loader() - actual = loader.unicode("foo") - - self.assertString(actual, u"foo") - - def test_unicode__basic__input_unicode(self): - """ - Test unicode(): default arguments with unicode input. - - """ - loader = Loader() - actual = loader.unicode(u"foo") - - self.assertString(actual, u"foo") - - def test_unicode__basic__input_unicode_subclass(self): - """ - Test unicode(): default arguments with unicode-subclass input. - - """ - class UnicodeSubclass(unicode): - pass - - s = UnicodeSubclass(u"foo") - - loader = Loader() - actual = loader.unicode(s) - - self.assertString(actual, u"foo") - - def test_unicode__to_unicode__attribute(self): - """ - Test unicode(): encoding attribute. - - """ - loader = Loader() - - non_ascii = u'abcdé'.encode('utf-8') - self.assertRaises(UnicodeDecodeError, loader.unicode, non_ascii) - - def to_unicode(s, encoding=None): - if encoding is None: - encoding = 'utf-8' - return unicode(s, encoding) - - loader.to_unicode = to_unicode - self.assertString(loader.unicode(non_ascii), u"abcdé") - - def test_unicode__encoding_argument(self): - """ - Test unicode(): encoding argument. - - """ - loader = Loader() - - non_ascii = u'abcdé'.encode('utf-8') - - self.assertRaises(UnicodeDecodeError, loader.unicode, non_ascii) - - actual = loader.unicode(non_ascii, encoding='utf-8') - self.assertString(actual, u'abcdé') - - # TODO: check the read() unit tests. - def test_read(self): - """ - Test read(). - - """ - loader = Loader() - path = self._get_path('ascii.mustache') - actual = loader.read(path) - self.assertString(actual, u'ascii: abc') - - def test_read__file_encoding__attribute(self): - """ - Test read(): file_encoding attribute respected. - - """ - loader = Loader() - path = self._get_path('non_ascii.mustache') - - self.assertRaises(UnicodeDecodeError, loader.read, path) - - loader.file_encoding = 'utf-8' - actual = loader.read(path) - self.assertString(actual, u'non-ascii: é') - - def test_read__encoding__argument(self): - """ - Test read(): encoding argument respected. 
- - """ - loader = Loader() - path = self._get_path('non_ascii.mustache') - - self.assertRaises(UnicodeDecodeError, loader.read, path) - - actual = loader.read(path, encoding='utf-8') - self.assertString(actual, u'non-ascii: é') - - def test_read__to_unicode__attribute(self): - """ - Test read(): to_unicode attribute respected. - - """ - loader = Loader() - path = self._get_path('non_ascii.mustache') - - self.assertRaises(UnicodeDecodeError, loader.read, path) - - #loader.decode_errors = 'ignore' - #actual = loader.read(path) - #self.assertString(actual, u'non-ascii: ') - - def test_load_file(self): - loader = Loader(search_dirs=[DATA_DIR, LOADER_DATA_DIR]) - template = loader.load_file('template.txt') - self.assertEqual(template, 'Test template file\n') - - def test_load_name(self): - loader = Loader(search_dirs=[DATA_DIR, LOADER_DATA_DIR], - extension='txt') - template = loader.load_name('template') - self.assertEqual(template, 'Test template file\n') - diff --git a/third_party/python/pystache/pystache/tests/test_locator.py b/third_party/python/pystache/pystache/tests/test_locator.py deleted file mode 100644 index ee1c2ff84618..000000000000 --- a/third_party/python/pystache/pystache/tests/test_locator.py +++ /dev/null @@ -1,179 +0,0 @@ -# encoding: utf-8 - -""" -Unit tests for locator.py. - -""" - -from datetime import datetime -import os -import sys -import unittest - -# TODO: remove this alias. -from pystache.common import TemplateNotFoundError -from pystache.loader import Loader as Reader -from pystache.locator import Locator - -from pystache.tests.common import DATA_DIR, EXAMPLES_DIR, AssertExceptionMixin -from pystache.tests.data.views import SayHello - - -LOCATOR_DATA_DIR = os.path.join(DATA_DIR, 'locator') - - -class LocatorTests(unittest.TestCase, AssertExceptionMixin): - - def _locator(self): - return Locator(search_dirs=DATA_DIR) - - def test_init__extension(self): - # Test the default value. - locator = Locator() - self.assertEqual(locator.template_extension, 'mustache') - - locator = Locator(extension='txt') - self.assertEqual(locator.template_extension, 'txt') - - locator = Locator(extension=False) - self.assertTrue(locator.template_extension is False) - - def _assert_paths(self, actual, expected): - """ - Assert that two paths are the same. - - """ - self.assertEqual(actual, expected) - - def test_get_object_directory(self): - locator = Locator() - - obj = SayHello() - actual = locator.get_object_directory(obj) - - self._assert_paths(actual, DATA_DIR) - - def test_get_object_directory__not_hasattr_module(self): - locator = Locator() - - # Previously, we used a genuine object -- a datetime instance -- - # because datetime instances did not have the __module__ attribute - # in CPython. See, for example-- - # - # http://bugs.python.org/issue15223 - # - # However, since datetime instances do have the __module__ attribute - # in PyPy, we needed to switch to something else once we added - # support for PyPi. This was so that our test runs would pass - # in all systems. 
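The Loader tests above revolve around how template text is decoded: the to_unicode hook, the DECODE_ERRORS and STRING_ENCODING defaults, and per-call encoding overrides to read(). A brief sketch of that surface under the same assumptions; the file name is illustrative only.

    from pystache.loader import Loader

    loader = Loader()                        # extension defaults to 'mustache'
    assert loader.unicode('foo') == u'foo'   # str input is decoded to unicode

    # read() decodes file contents with loader.file_encoding unless an
    # explicit encoding is passed for that call.
    text = loader.read('non_ascii.mustache', encoding='utf-8')  # illustrative path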
- obj = "abc" - self.assertFalse(hasattr(obj, '__module__')) - self.assertEqual(locator.get_object_directory(obj), None) - - self.assertFalse(hasattr(None, '__module__')) - self.assertEqual(locator.get_object_directory(None), None) - - def test_make_file_name(self): - locator = Locator() - - locator.template_extension = 'bar' - self.assertEqual(locator.make_file_name('foo'), 'foo.bar') - - locator.template_extension = False - self.assertEqual(locator.make_file_name('foo'), 'foo') - - locator.template_extension = '' - self.assertEqual(locator.make_file_name('foo'), 'foo.') - - def test_make_file_name__template_extension_argument(self): - locator = Locator() - - self.assertEqual(locator.make_file_name('foo', template_extension='bar'), 'foo.bar') - - def test_find_file(self): - locator = Locator() - path = locator.find_file('template.txt', [LOCATOR_DATA_DIR]) - - expected_path = os.path.join(LOCATOR_DATA_DIR, 'template.txt') - self.assertEqual(path, expected_path) - - def test_find_name(self): - locator = Locator() - path = locator.find_name(search_dirs=[EXAMPLES_DIR], template_name='simple') - - self.assertEqual(os.path.basename(path), 'simple.mustache') - - def test_find_name__using_list_of_paths(self): - locator = Locator() - path = locator.find_name(search_dirs=[EXAMPLES_DIR, 'doesnt_exist'], template_name='simple') - - self.assertTrue(path) - - def test_find_name__precedence(self): - """ - Test the order in which find_name() searches directories. - - """ - locator = Locator() - - dir1 = DATA_DIR - dir2 = LOCATOR_DATA_DIR - - self.assertTrue(locator.find_name(search_dirs=[dir1], template_name='duplicate')) - self.assertTrue(locator.find_name(search_dirs=[dir2], template_name='duplicate')) - - path = locator.find_name(search_dirs=[dir2, dir1], template_name='duplicate') - dirpath = os.path.dirname(path) - dirname = os.path.split(dirpath)[-1] - - self.assertEqual(dirname, 'locator') - - def test_find_name__non_existent_template_fails(self): - locator = Locator() - - self.assertException(TemplateNotFoundError, "File 'doesnt_exist.mustache' not found in dirs: []", - locator.find_name, search_dirs=[], template_name='doesnt_exist') - - def test_find_object(self): - locator = Locator() - - obj = SayHello() - - actual = locator.find_object(search_dirs=[], obj=obj, file_name='sample_view.mustache') - expected = os.path.join(DATA_DIR, 'sample_view.mustache') - - self._assert_paths(actual, expected) - - def test_find_object__none_file_name(self): - locator = Locator() - - obj = SayHello() - - actual = locator.find_object(search_dirs=[], obj=obj) - expected = os.path.join(DATA_DIR, 'say_hello.mustache') - - self.assertEqual(actual, expected) - - def test_find_object__none_object_directory(self): - locator = Locator() - - obj = None - self.assertEqual(None, locator.get_object_directory(obj)) - - actual = locator.find_object(search_dirs=[DATA_DIR], obj=obj, file_name='say_hello.mustache') - expected = os.path.join(DATA_DIR, 'say_hello.mustache') - - self.assertEqual(actual, expected) - - def test_make_template_name(self): - """ - Test make_template_name(). 
- - """ - locator = Locator() - - class FooBar(object): - pass - foo = FooBar() - - self.assertEqual(locator.make_template_name(foo), 'foo_bar') diff --git a/third_party/python/pystache/pystache/tests/test_parser.py b/third_party/python/pystache/pystache/tests/test_parser.py deleted file mode 100644 index 92248ea2475d..000000000000 --- a/third_party/python/pystache/pystache/tests/test_parser.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding: utf-8 - -""" -Unit tests of parser.py. - -""" - -import unittest - -from pystache.defaults import DELIMITERS -from pystache.parser import _compile_template_re as make_re - - -class RegularExpressionTestCase(unittest.TestCase): - - """Tests the regular expression returned by _compile_template_re().""" - - def test_re(self): - """ - Test getting a key from a dictionary. - - """ - re = make_re(DELIMITERS) - match = re.search("b {{test}}") - - self.assertEqual(match.start(), 1) - diff --git a/third_party/python/pystache/pystache/tests/test_pystache.py b/third_party/python/pystache/pystache/tests/test_pystache.py deleted file mode 100644 index 5447f8da5179..000000000000 --- a/third_party/python/pystache/pystache/tests/test_pystache.py +++ /dev/null @@ -1,127 +0,0 @@ -# encoding: utf-8 - -import unittest - -import pystache -from pystache import defaults -from pystache import renderer -from pystache.tests.common import html_escape - - -class PystacheTests(unittest.TestCase): - - - def setUp(self): - self.original_escape = defaults.TAG_ESCAPE - defaults.TAG_ESCAPE = html_escape - - def tearDown(self): - defaults.TAG_ESCAPE = self.original_escape - - def _assert_rendered(self, expected, template, context): - actual = pystache.render(template, context) - self.assertEqual(actual, expected) - - def test_basic(self): - ret = pystache.render("Hi {{thing}}!", { 'thing': 'world' }) - self.assertEqual(ret, "Hi world!") - - def test_kwargs(self): - ret = pystache.render("Hi {{thing}}!", thing='world') - self.assertEqual(ret, "Hi world!") - - def test_less_basic(self): - template = "It's a nice day for {{beverage}}, right {{person}}?" - context = { 'beverage': 'soda', 'person': 'Bob' } - self._assert_rendered("It's a nice day for soda, right Bob?", template, context) - - def test_even_less_basic(self): - template = "I think {{name}} wants a {{thing}}, right {{name}}?" - context = { 'name': 'Jon', 'thing': 'racecar' } - self._assert_rendered("I think Jon wants a racecar, right Jon?", template, context) - - def test_ignores_misses(self): - template = "I think {{name}} wants a {{thing}}, right {{name}}?" - context = { 'name': 'Jon' } - self._assert_rendered("I think Jon wants a , right Jon?", template, context) - - def test_render_zero(self): - template = 'My value is {{value}}.' - context = { 'value': 0 } - self._assert_rendered('My value is 0.', template, context) - - def test_comments(self): - template = "What {{! the }} what?" - actual = pystache.render(template) - self.assertEqual("What what?", actual) - - def test_false_sections_are_hidden(self): - template = "Ready {{#set}}set {{/set}}go!" - context = { 'set': False } - self._assert_rendered("Ready go!", template, context) - - def test_true_sections_are_shown(self): - template = "Ready {{#set}}set{{/set}} go!" 
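For context, the module-level pystache.render() calls exercised above are the simplest entry point. A short sketch restating the interpolation and section assertions from these tests (see also the section tests that follow).

    import pystache

    # Plain interpolation, from a dict or from keyword arguments.
    assert pystache.render('Hi {{thing}}!', {'thing': 'world'}) == 'Hi world!'
    assert pystache.render('Hi {{thing}}!', thing='world') == 'Hi world!'

    # Falsey section values hide the block entirely; truthy ones show it.
    assert pystache.render('Ready {{#set}}set {{/set}}go!', {'set': False}) == 'Ready go!'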
- context = { 'set': True } - self._assert_rendered("Ready set go!", template, context) - - non_strings_expected = """(123 & ['something'])(chris & 0.9)""" - - def test_non_strings(self): - template = "{{#stats}}({{key}} & {{value}}){{/stats}}" - stats = [] - stats.append({'key': 123, 'value': ['something']}) - stats.append({'key': u"chris", 'value': 0.900}) - context = { 'stats': stats } - self._assert_rendered(self.non_strings_expected, template, context) - - def test_unicode(self): - template = 'Name: {{name}}; Age: {{age}}' - context = {'name': u'Henri Poincaré', 'age': 156 } - self._assert_rendered(u'Name: Henri Poincaré; Age: 156', template, context) - - def test_sections(self): - template = """
<ul>{{#users}}<li>{{name}}</li>{{/users}}</ul>""" - - context = { 'users': [ {'name': 'Chris'}, {'name': 'Tom'}, {'name': 'PJ'} ] } - expected = """<ul><li>Chris</li><li>Tom</li><li>PJ</li></ul>""" - self._assert_rendered(expected, template, context) - - def test_implicit_iterator(self): - template = """<ul>{{#users}}<li>{{.}}</li>{{/users}}</ul>""" - context = { 'users': [ 'Chris', 'Tom','PJ' ] } - expected = """<ul><li>Chris</li><li>Tom</li><li>PJ</li></ul>
    """ - self._assert_rendered(expected, template, context) - - # The spec says that sections should not alter surrounding whitespace. - def test_surrounding_whitepace_not_altered(self): - template = "first{{#spacing}} second {{/spacing}}third" - context = {"spacing": True} - self._assert_rendered("first second third", template, context) - - def test__section__non_false_value(self): - """ - Test when a section value is a (non-list) "non-false value". - - From mustache(5): - - When the value [of a section key] is non-false but not a list, it - will be used as the context for a single rendering of the block. - - """ - template = """{{#person}}Hi {{name}}{{/person}}""" - context = {"person": {"name": "Jon"}} - self._assert_rendered("Hi Jon", template, context) - - def test_later_list_section_with_escapable_character(self): - """ - This is a simple test case intended to cover issue #53. - - The test case failed with markupsafe enabled, as follows: - - AssertionError: Markup(u'foo <') != 'foo <' - - """ - template = """{{#s1}}foo{{/s1}} {{#s2}}<{{/s2}}""" - context = {'s1': True, 's2': [True]} - self._assert_rendered("foo <", template, context) diff --git a/third_party/python/pystache/pystache/tests/test_renderengine.py b/third_party/python/pystache/pystache/tests/test_renderengine.py deleted file mode 100644 index db916f77ba82..000000000000 --- a/third_party/python/pystache/pystache/tests/test_renderengine.py +++ /dev/null @@ -1,769 +0,0 @@ -# coding: utf-8 - -""" -Unit tests of renderengine.py. - -""" - -import sys -import unittest - -from pystache.context import ContextStack, KeyNotFoundError -from pystache import defaults -from pystache.parser import ParsingError -from pystache.renderer import Renderer -from pystache.renderengine import context_get, RenderEngine -from pystache.tests.common import AssertStringMixin, AssertExceptionMixin, Attachable - - -def _get_unicode_char(): - if sys.version_info < (3, ): - return 'u' - return '' - -_UNICODE_CHAR = _get_unicode_char() - - -def mock_literal(s): - """ - For use as the literal keyword argument to the RenderEngine constructor. - - Arguments: - - s: a byte string or unicode string. - - """ - if isinstance(s, unicode): - # Strip off unicode super classes, if present. - u = unicode(s) - else: - u = unicode(s, encoding='ascii') - - # We apply upper() to make sure we are actually using our custom - # function in the tests - return u.upper() - - - -class RenderEngineTestCase(unittest.TestCase): - - """Test the RenderEngine class.""" - - def test_init(self): - """ - Test that __init__() stores all of the arguments correctly. - - """ - # In real-life, these arguments would be functions - engine = RenderEngine(resolve_partial="foo", literal="literal", - escape="escape", to_str="str") - - self.assertEqual(engine.escape, "escape") - self.assertEqual(engine.literal, "literal") - self.assertEqual(engine.resolve_partial, "foo") - self.assertEqual(engine.to_str, "str") - - -class RenderTests(unittest.TestCase, AssertStringMixin, AssertExceptionMixin): - - """ - Tests RenderEngine.render(). - - Explicit spec-test-like tests best go in this class since the - RenderEngine class contains all parsing logic. This way, the unit tests - will be more focused and fail "closer to the code". - - """ - - def _engine(self): - """ - Create and return a default RenderEngine for testing. 
- - """ - renderer = Renderer(string_encoding='utf-8', missing_tags='strict') - engine = renderer._make_render_engine() - - return engine - - def _assert_render(self, expected, template, *context, **kwargs): - """ - Test rendering the given template using the given context. - - """ - partials = kwargs.get('partials') - engine = kwargs.get('engine', self._engine()) - - if partials is not None: - engine.resolve_partial = lambda key: unicode(partials[key]) - - context = ContextStack(*context) - - # RenderEngine.render() only accepts unicode template strings. - actual = engine.render(unicode(template), context) - - self.assertString(actual=actual, expected=expected) - - def test_render(self): - self._assert_render(u'Hi Mom', 'Hi {{person}}', {'person': 'Mom'}) - - def test__resolve_partial(self): - """ - Test that render() uses the load_template attribute. - - """ - engine = self._engine() - partials = {'partial': u"{{person}}"} - engine.resolve_partial = lambda key: partials[key] - - self._assert_render(u'Hi Mom', 'Hi {{>partial}}', {'person': 'Mom'}, engine=engine) - - def test__literal(self): - """ - Test that render() uses the literal attribute. - - """ - engine = self._engine() - engine.literal = lambda s: s.upper() - - self._assert_render(u'BAR', '{{{foo}}}', {'foo': 'bar'}, engine=engine) - - def test_literal__sigil(self): - template = "
<h1>{{& thing}}</h1>" - context = {'thing': 'Bear > Giraffe'} - - expected = u"<h1>Bear > Giraffe</h1>
    " - - self._assert_render(expected, template, context) - - def test__escape(self): - """ - Test that render() uses the escape attribute. - - """ - engine = self._engine() - engine.escape = lambda s: "**" + s - - self._assert_render(u'**bar', '{{foo}}', {'foo': 'bar'}, engine=engine) - - def test__escape_does_not_call_literal(self): - """ - Test that render() does not call literal before or after calling escape. - - """ - engine = self._engine() - engine.literal = lambda s: s.upper() # a test version - engine.escape = lambda s: "**" + s - - template = 'literal: {{{foo}}} escaped: {{foo}}' - context = {'foo': 'bar'} - - self._assert_render(u'literal: BAR escaped: **bar', template, context, engine=engine) - - def test__escape_preserves_unicode_subclasses(self): - """ - Test that render() preserves unicode subclasses when passing to escape. - - This is useful, for example, if one wants to respect whether a - variable value is markupsafe.Markup when escaping. - - """ - class MyUnicode(unicode): - pass - - def escape(s): - if type(s) is MyUnicode: - return "**" + s - else: - return s + "**" - - engine = self._engine() - engine.escape = escape - - template = '{{foo1}} {{foo2}}' - context = {'foo1': MyUnicode('bar'), 'foo2': 'bar'} - - self._assert_render(u'**bar bar**', template, context, engine=engine) - - # Custom to_str for testing purposes. - def _to_str(self, val): - if not val: - return '' - else: - return str(val) - - def test_to_str(self): - """Test the to_str attribute.""" - engine = self._engine() - template = '{{value}}' - context = {'value': None} - - self._assert_render(u'None', template, context, engine=engine) - engine.to_str = self._to_str - self._assert_render(u'', template, context, engine=engine) - - def test_to_str__lambda(self): - """Test the to_str attribute for a lambda.""" - engine = self._engine() - template = '{{value}}' - context = {'value': lambda: None} - - self._assert_render(u'None', template, context, engine=engine) - engine.to_str = self._to_str - self._assert_render(u'', template, context, engine=engine) - - def test_to_str__section_list(self): - """Test the to_str attribute for a section list.""" - engine = self._engine() - template = '{{#list}}{{.}}{{/list}}' - context = {'list': [None, None]} - - self._assert_render(u'NoneNone', template, context, engine=engine) - engine.to_str = self._to_str - self._assert_render(u'', template, context, engine=engine) - - def test_to_str__section_lambda(self): - # TODO: add a test for a "method with an arity of 1". - pass - - def test__non_basestring__literal_and_escaped(self): - """ - Test a context value that is not a basestring instance. - - """ - engine = self._engine() - engine.escape = mock_literal - engine.literal = mock_literal - - self.assertRaises(TypeError, engine.literal, 100) - - template = '{{text}} {{int}} {{{int}}}' - context = {'int': 100, 'text': 'foo'} - - self._assert_render(u'FOO 100 100', template, context, engine=engine) - - def test_tag__output_not_interpolated(self): - """ - Context values should not be treated as templates (issue #44). - - """ - template = '{{template}}: {{planet}}' - context = {'template': '{{planet}}', 'planet': 'Earth'} - self._assert_render(u'{{planet}}: Earth', template, context) - - def test_tag__output_not_interpolated__section(self): - """ - Context values should not be treated as templates (issue #44). 
- - """ - template = '{{test}}' - context = {'test': '{{#hello}}'} - self._assert_render(u'{{#hello}}', template, context) - - ## Test interpolation with "falsey" values - # - # In these test cases, we test the part of the spec that says that - # "data should be coerced into a string (and escaped, if appropriate) - # before interpolation." We test this for data that is "falsey." - - def test_interpolation__falsey__zero(self): - template = '{{.}}' - context = 0 - self._assert_render(u'0', template, context) - - def test_interpolation__falsey__none(self): - template = '{{.}}' - context = None - self._assert_render(u'None', template, context) - - def test_interpolation__falsey__zero(self): - template = '{{.}}' - context = False - self._assert_render(u'False', template, context) - - # Built-in types: - # - # Confirm that we not treat instances of built-in types as objects, - # for example by calling a method on a built-in type instance when it - # has a method whose name matches the current key. - # - # Each test case puts an instance of a built-in type on top of the - # context stack before interpolating a tag whose key matches an - # attribute (method or property) of the instance. - # - - def _assert_builtin_attr(self, item, attr_name, expected_attr): - self.assertTrue(hasattr(item, attr_name)) - actual = getattr(item, attr_name) - if callable(actual): - actual = actual() - self.assertEqual(actual, expected_attr) - - def _assert_builtin_type(self, item, attr_name, expected_attr, expected_template): - self._assert_builtin_attr(item, attr_name, expected_attr) - - template = '{{#section}}{{%s}}{{/section}}' % attr_name - context = {'section': item, attr_name: expected_template} - self._assert_render(expected_template, template, context) - - def test_interpolation__built_in_type__string(self): - """ - Check tag interpolation with a built-in type: string. - - """ - self._assert_builtin_type('abc', 'upper', 'ABC', u'xyz') - - def test_interpolation__built_in_type__integer(self): - """ - Check tag interpolation with a built-in type: integer. - - """ - # Since public attributes weren't added to integers until Python 2.6 - # (for example the "real" attribute of the numeric type hierarchy)-- - # - # http://docs.python.org/library/numbers.html - # - # we need to resort to built-in attributes (double-underscored) on - # the integer type. - self._assert_builtin_type(15, '__neg__', -15, u'999') - - def test_interpolation__built_in_type__list(self): - """ - Check tag interpolation with a built-in type: list. - - """ - item = [[1, 2, 3]] - attr_name = 'pop' - # Make a copy to prevent changes to item[0]. - self._assert_builtin_attr(list(item[0]), attr_name, 3) - - template = '{{#section}}{{%s}}{{/section}}' % attr_name - context = {'section': item, attr_name: 7} - self._assert_render(u'7', template, context) - - # This test is also important for testing 2to3. - def test_interpolation__nonascii_nonunicode(self): - """ - Test a tag whose value is a non-ascii, non-unicode string. - - """ - template = '{{nonascii}}' - context = {'nonascii': u'abcdé'.encode('utf-8')} - self._assert_render(u'abcdé', template, context) - - def test_implicit_iterator__literal(self): - """ - Test an implicit iterator in a literal tag. - - """ - template = """{{#test}}{{{.}}}{{/test}}""" - context = {'test': ['<', '>']} - - self._assert_render(u'<>', template, context) - - def test_implicit_iterator__escaped(self): - """ - Test an implicit iterator in a normal tag. 
- - """ - template = """{{#test}}{{.}}{{/test}}""" - context = {'test': ['<', '>']} - - self._assert_render(u'<>', template, context) - - def test_literal__in_section(self): - """ - Check that literals work in sections. - - """ - template = '{{#test}}1 {{{less_than}}} 2{{/test}}' - context = {'test': {'less_than': '<'}} - - self._assert_render(u'1 < 2', template, context) - - def test_literal__in_partial(self): - """ - Check that literals work in partials. - - """ - template = '{{>partial}}' - partials = {'partial': '1 {{{less_than}}} 2'} - context = {'less_than': '<'} - - self._assert_render(u'1 < 2', template, context, partials=partials) - - def test_partial(self): - partials = {'partial': "{{person}}"} - self._assert_render(u'Hi Mom', 'Hi {{>partial}}', {'person': 'Mom'}, partials=partials) - - def test_partial__context_values(self): - """ - Test that escape and literal work on context values in partials. - - """ - engine = self._engine() - - template = '{{>partial}}' - partials = {'partial': 'unescaped: {{{foo}}} escaped: {{foo}}'} - context = {'foo': '<'} - - self._assert_render(u'unescaped: < escaped: <', template, context, engine=engine, partials=partials) - - ## Test cases related specifically to lambdas. - - # This test is also important for testing 2to3. - def test_section__nonascii_nonunicode(self): - """ - Test a section whose value is a non-ascii, non-unicode string. - - """ - template = '{{#nonascii}}{{.}}{{/nonascii}}' - context = {'nonascii': u'abcdé'.encode('utf-8')} - self._assert_render(u'abcdé', template, context) - - # This test is also important for testing 2to3. - def test_lambda__returning_nonascii_nonunicode(self): - """ - Test a lambda tag value returning a non-ascii, non-unicode string. - - """ - template = '{{lambda}}' - context = {'lambda': lambda: u'abcdé'.encode('utf-8')} - self._assert_render(u'abcdé', template, context) - - ## Test cases related specifically to sections. - - def test_section__end_tag_with_no_start_tag(self): - """ - Check what happens if there is an end tag with no start tag. - - """ - template = '{{/section}}' - try: - self._assert_render(None, template) - except ParsingError, err: - self.assertEqual(str(err), "Section end tag mismatch: section != None") - - def test_section__end_tag_mismatch(self): - """ - Check what happens if the end tag doesn't match. - - """ - template = '{{#section_start}}{{/section_end}}' - try: - self._assert_render(None, template) - except ParsingError, err: - self.assertEqual(str(err), "Section end tag mismatch: section_end != section_start") - - def test_section__context_values(self): - """ - Test that escape and literal work on context values in sections. - - """ - engine = self._engine() - - template = '{{#test}}unescaped: {{{foo}}} escaped: {{foo}}{{/test}}' - context = {'test': {'foo': '<'}} - - self._assert_render(u'unescaped: < escaped: <', template, context, engine=engine) - - def test_section__context_precedence(self): - """ - Check that items higher in the context stack take precedence. - - """ - template = '{{entree}} : {{#vegetarian}}{{entree}}{{/vegetarian}}' - context = {'entree': 'chicken', 'vegetarian': {'entree': 'beans and rice'}} - self._assert_render(u'chicken : beans and rice', template, context) - - def test_section__list_referencing_outer_context(self): - """ - Check that list items can access the parent context. - - For sections whose value is a list, check that items in the list - have access to the values inherited from the parent context - when rendering. 
- - """ - context = { - "greeting": "Hi", - "list": [{"name": "Al"}, {"name": "Bob"}], - } - - template = "{{#list}}{{greeting}} {{name}}, {{/list}}" - - self._assert_render(u"Hi Al, Hi Bob, ", template, context) - - def test_section__output_not_interpolated(self): - """ - Check that rendered section output is not interpolated. - - """ - template = '{{#section}}{{template}}{{/section}}: {{planet}}' - context = {'section': True, 'template': '{{planet}}', 'planet': 'Earth'} - self._assert_render(u'{{planet}}: Earth', template, context) - - # TODO: have this test case added to the spec. - def test_section__string_values_not_lists(self): - """ - Check that string section values are not interpreted as lists. - - """ - template = '{{#section}}foo{{/section}}' - context = {'section': '123'} - # If strings were interpreted as lists, this would give "foofoofoo". - self._assert_render(u'foo', template, context) - - def test_section__nested_truthy(self): - """ - Check that "nested truthy" sections get rendered. - - Test case for issue #24: https://github.com/defunkt/pystache/issues/24 - - This test is copied from the spec. We explicitly include it to - prevent regressions for those who don't pull down the spec tests. - - """ - template = '| A {{#bool}}B {{#bool}}C{{/bool}} D{{/bool}} E |' - context = {'bool': True} - self._assert_render(u'| A B C D E |', template, context) - - def test_section__nested_with_same_keys(self): - """ - Check a doubly-nested section with the same context key. - - Test case for issue #36: https://github.com/defunkt/pystache/issues/36 - - """ - # Start with an easier, working case. - template = '{{#x}}{{#z}}{{y}}{{/z}}{{/x}}' - context = {'x': {'z': {'y': 1}}} - self._assert_render(u'1', template, context) - - template = '{{#x}}{{#x}}{{y}}{{/x}}{{/x}}' - context = {'x': {'x': {'y': 1}}} - self._assert_render(u'1', template, context) - - def test_section__lambda(self): - template = '{{#test}}Mom{{/test}}' - context = {'test': (lambda text: 'Hi %s' % text)} - self._assert_render(u'Hi Mom', template, context) - - # This test is also important for testing 2to3. - def test_section__lambda__returning_nonascii_nonunicode(self): - """ - Test a lambda section value returning a non-ascii, non-unicode string. - - """ - template = '{{#lambda}}{{/lambda}}' - context = {'lambda': lambda text: u'abcdé'.encode('utf-8')} - self._assert_render(u'abcdé', template, context) - - def test_section__lambda__returning_nonstring(self): - """ - Test a lambda section value returning a non-string. - - """ - template = '{{#lambda}}foo{{/lambda}}' - context = {'lambda': lambda text: len(text)} - self._assert_render(u'3', template, context) - - def test_section__iterable(self): - """ - Check that objects supporting iteration (aside from dicts) behave like lists. - - """ - template = '{{#iterable}}{{.}}{{/iterable}}' - - context = {'iterable': (i for i in range(3))} # type 'generator' - self._assert_render(u'012', template, context) - - context = {'iterable': xrange(4)} # type 'xrange' - self._assert_render(u'0123', template, context) - - d = {'foo': 0, 'bar': 0} - # We don't know what order of keys we'll be given, but from the - # Python documentation: - # "If items(), keys(), values(), iteritems(), iterkeys(), and - # itervalues() are called with no intervening modifications to - # the dictionary, the lists will directly correspond." 
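A compact restatement of the section semantics these RenderEngine tests pin down, assuming the high-level pystache.render() wrapper behaves the same way as the engine built by _engine() above for these particular cases.

    import pystache

    # Non-dict iterables (generators, ranges) behave like lists in sections.
    assert pystache.render('{{#nums}}{{.}}{{/nums}}',
                           {'nums': (i for i in range(3))}) == '012'

    # A section lambda receives the raw, unrendered section text; its return
    # value is rendered against the current context before being interpolated.
    assert pystache.render('{{#wrap}}Hi {{person}}{{/wrap}}',
                           {'person': 'Mom', 'wrap': lambda text: text + ' :)'}) == 'Hi Mom :)'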
- expected = u''.join(d.keys()) - context = {'iterable': d.iterkeys()} # type 'dictionary-keyiterator' - self._assert_render(expected, template, context) - - def test_section__lambda__tag_in_output(self): - """ - Check that callable output is treated as a template string (issue #46). - - The spec says-- - - When used as the data value for a Section tag, the lambda MUST - be treatable as an arity 1 function, and invoked as such (passing - a String containing the unprocessed section contents). The - returned value MUST be rendered against the current delimiters, - then interpolated in place of the section. - - """ - template = '{{#test}}Hi {{person}}{{/test}}' - context = {'person': 'Mom', 'test': (lambda text: text + " :)")} - self._assert_render(u'Hi Mom :)', template, context) - - def test_section__lambda__list(self): - """ - Check that lists of lambdas are processed correctly for sections. - - This test case is equivalent to a test submitted to the Mustache spec here: - - https://github.com/mustache/spec/pull/47 . - - """ - template = '<{{#lambdas}}foo{{/lambdas}}>' - context = {'foo': 'bar', - 'lambdas': [lambda text: "~{{%s}}~" % text, - lambda text: "#{{%s}}#" % text]} - - self._assert_render(u'<~bar~#bar#>', template, context) - - def test_section__lambda__mixed_list(self): - """ - Test a mixed list of lambdas and non-lambdas as a section value. - - This test case is equivalent to a test submitted to the Mustache spec here: - - https://github.com/mustache/spec/pull/47 . - - """ - template = '<{{#lambdas}}foo{{/lambdas}}>' - context = {'foo': 'bar', - 'lambdas': [lambda text: "~{{%s}}~" % text, 1]} - - self._assert_render(u'<~bar~foo>', template, context) - - def test_section__lambda__not_on_context_stack(self): - """ - Check that section lambdas are not pushed onto the context stack. - - Even though the sections spec says that section data values should be - pushed onto the context stack prior to rendering, this does not apply - to lambdas. Lambdas obey their own special case. - - This test case is equivalent to a test submitted to the Mustache spec here: - - https://github.com/mustache/spec/pull/47 . - - """ - context = {'foo': 'bar', 'lambda': (lambda text: "{{.}}")} - template = '{{#foo}}{{#lambda}}blah{{/lambda}}{{/foo}}' - self._assert_render(u'bar', template, context) - - def test_section__lambda__no_reinterpolation(self): - """ - Check that section lambda return values are not re-interpolated. - - This test is a sanity check that the rendered lambda return value - is not re-interpolated as could be construed by reading the - section part of the Mustache spec. - - This test case is equivalent to a test submitted to the Mustache spec here: - - https://github.com/mustache/spec/pull/47 . - - """ - template = '{{#planet}}{{#lambda}}dot{{/lambda}}{{/planet}}' - context = {'planet': 'Earth', 'dot': '~{{.}}~', 'lambda': (lambda text: "#{{%s}}#" % text)} - self._assert_render(u'#~{{.}}~#', template, context) - - def test_comment__multiline(self): - """ - Check that multiline comments are permitted. - - """ - self._assert_render(u'foobar', 'foo{{! baz }}bar') - self._assert_render(u'foobar', 'foo{{! \nbaz }}bar') - - def test_custom_delimiters__sections(self): - """ - Check that custom delimiters can be used to start a section. 
- - Test case for issue #20: https://github.com/defunkt/pystache/issues/20 - - """ - template = '{{=[[ ]]=}}[[#foo]]bar[[/foo]]' - context = {'foo': True} - self._assert_render(u'bar', template, context) - - def test_custom_delimiters__not_retroactive(self): - """ - Check that changing custom delimiters back is not "retroactive." - - Test case for issue #35: https://github.com/defunkt/pystache/issues/35 - - """ - expected = u' {{foo}} ' - self._assert_render(expected, '{{=$ $=}} {{foo}} ') - self._assert_render(expected, '{{=$ $=}} {{foo}} $={{ }}=$') # was yielding u' '. - - def test_dot_notation(self): - """ - Test simple dot notation cases. - - Check that we can use dot notation when the variable is a dict, - user-defined object, or combination of both. - - """ - template = 'Hello, {{person.name}}. I see you are {{person.details.age}}.' - person = Attachable(name='Biggles', details={'age': 42}) - context = {'person': person} - self._assert_render(u'Hello, Biggles. I see you are 42.', template, context) - - def test_dot_notation__multiple_levels(self): - """ - Test dot notation with multiple levels. - - """ - template = """Hello, Mr. {{person.name.lastname}}. - I see you're back from {{person.travels.last.country.city}}.""" - expected = u"""Hello, Mr. Pither. - I see you're back from Cornwall.""" - context = {'person': {'name': {'firstname': 'unknown', 'lastname': 'Pither'}, - 'travels': {'last': {'country': {'city': 'Cornwall'}}}, - 'details': {'public': 'likes cycling'}}} - self._assert_render(expected, template, context) - - # It should also work with user-defined objects - context = {'person': Attachable(name={'firstname': 'unknown', 'lastname': 'Pither'}, - travels=Attachable(last=Attachable(country=Attachable(city='Cornwall'))), - details=Attachable())} - self._assert_render(expected, template, context) - - def test_dot_notation__missing_part_terminates_search(self): - """ - Test that dotted name resolution terminates on a later part not found. - - Check that if a later dotted name part is not found in the result from - the former resolution, then name resolution terminates rather than - starting the search over with the next element of the context stack. - From the spec (interpolation section)-- - - 5) If any name parts were retained in step 1, each should be resolved - against a context stack containing only the result from the former - resolution. If any part fails resolution, the result should be considered - falsey, and should interpolate as the empty string. - - This test case is equivalent to the test case in the following pull - request: - - https://github.com/mustache/spec/pull/48 - - """ - context = {'a': {'b': 'A.B'}, 'c': {'a': 'A'} } - - template = '{{a.b}}' - self._assert_render(u'A.B', template, context) - - template = '{{#c}}{{a}}{{/c}}' - self._assert_render(u'A', template, context) - - template = '{{#c}}{{a.b}}{{/c}}' - self.assertException(KeyNotFoundError, "Key %(unicode)s'a.b' not found: missing %(unicode)s'b'" % - {'unicode': _UNICODE_CHAR}, - self._assert_render, 'A.B :: (A :: )', template, context) diff --git a/third_party/python/pystache/pystache/tests/test_renderer.py b/third_party/python/pystache/pystache/tests/test_renderer.py deleted file mode 100644 index 0dbe0d99eb2b..000000000000 --- a/third_party/python/pystache/pystache/tests/test_renderer.py +++ /dev/null @@ -1,725 +0,0 @@ -# coding: utf-8 - -""" -Unit tests of template.py. 
- -""" - -import codecs -import os -import sys -import unittest - -from examples.simple import Simple -from pystache import Renderer -from pystache import TemplateSpec -from pystache.common import TemplateNotFoundError -from pystache.context import ContextStack, KeyNotFoundError -from pystache.loader import Loader - -from pystache.tests.common import get_data_path, AssertStringMixin, AssertExceptionMixin -from pystache.tests.data.views import SayHello - - -def _make_renderer(): - """ - Return a default Renderer instance for testing purposes. - - """ - renderer = Renderer(string_encoding='ascii', file_encoding='ascii') - return renderer - - -def mock_unicode(b, encoding=None): - if encoding is None: - encoding = 'ascii' - u = unicode(b, encoding=encoding) - return u.upper() - - -class RendererInitTestCase(unittest.TestCase): - - """ - Tests the Renderer.__init__() method. - - """ - - def test_partials__default(self): - """ - Test the default value. - - """ - renderer = Renderer() - self.assertTrue(renderer.partials is None) - - def test_partials(self): - """ - Test that the attribute is set correctly. - - """ - renderer = Renderer(partials={'foo': 'bar'}) - self.assertEqual(renderer.partials, {'foo': 'bar'}) - - def test_escape__default(self): - escape = Renderer().escape - - self.assertEqual(escape(">"), ">") - self.assertEqual(escape('"'), """) - # Single quotes are escaped only in Python 3.2 and later. - if sys.version_info < (3, 2): - expected = "'" - else: - expected = ''' - self.assertEqual(escape("'"), expected) - - def test_escape(self): - escape = lambda s: "**" + s - renderer = Renderer(escape=escape) - self.assertEqual(renderer.escape("bar"), "**bar") - - def test_decode_errors__default(self): - """ - Check the default value. - - """ - renderer = Renderer() - self.assertEqual(renderer.decode_errors, 'strict') - - def test_decode_errors(self): - """ - Check that the constructor sets the attribute correctly. - - """ - renderer = Renderer(decode_errors="foo") - self.assertEqual(renderer.decode_errors, "foo") - - def test_file_encoding__default(self): - """ - Check the file_encoding default. - - """ - renderer = Renderer() - self.assertEqual(renderer.file_encoding, renderer.string_encoding) - - def test_file_encoding(self): - """ - Check that the file_encoding attribute is set correctly. - - """ - renderer = Renderer(file_encoding='foo') - self.assertEqual(renderer.file_encoding, 'foo') - - def test_file_extension__default(self): - """ - Check the file_extension default. - - """ - renderer = Renderer() - self.assertEqual(renderer.file_extension, 'mustache') - - def test_file_extension(self): - """ - Check that the file_encoding attribute is set correctly. - - """ - renderer = Renderer(file_extension='foo') - self.assertEqual(renderer.file_extension, 'foo') - - def test_missing_tags(self): - """ - Check that the missing_tags attribute is set correctly. - - """ - renderer = Renderer(missing_tags='foo') - self.assertEqual(renderer.missing_tags, 'foo') - - def test_missing_tags__default(self): - """ - Check the missing_tags default. - - """ - renderer = Renderer() - self.assertEqual(renderer.missing_tags, 'ignore') - - def test_search_dirs__default(self): - """ - Check the search_dirs default. - - """ - renderer = Renderer() - self.assertEqual(renderer.search_dirs, [os.curdir]) - - def test_search_dirs__string(self): - """ - Check that the search_dirs attribute is set correctly when a string. 
- - """ - renderer = Renderer(search_dirs='foo') - self.assertEqual(renderer.search_dirs, ['foo']) - - def test_search_dirs__list(self): - """ - Check that the search_dirs attribute is set correctly when a list. - - """ - renderer = Renderer(search_dirs=['foo']) - self.assertEqual(renderer.search_dirs, ['foo']) - - def test_string_encoding__default(self): - """ - Check the default value. - - """ - renderer = Renderer() - self.assertEqual(renderer.string_encoding, sys.getdefaultencoding()) - - def test_string_encoding(self): - """ - Check that the constructor sets the attribute correctly. - - """ - renderer = Renderer(string_encoding="foo") - self.assertEqual(renderer.string_encoding, "foo") - - -class RendererTests(unittest.TestCase, AssertStringMixin): - - """Test the Renderer class.""" - - def _renderer(self): - return Renderer() - - ## Test Renderer.unicode(). - - def test_unicode__string_encoding(self): - """ - Test that the string_encoding attribute is respected. - - """ - renderer = self._renderer() - b = u"é".encode('utf-8') - - renderer.string_encoding = "ascii" - self.assertRaises(UnicodeDecodeError, renderer.unicode, b) - - renderer.string_encoding = "utf-8" - self.assertEqual(renderer.unicode(b), u"é") - - def test_unicode__decode_errors(self): - """ - Test that the decode_errors attribute is respected. - - """ - renderer = self._renderer() - renderer.string_encoding = "ascii" - b = u"déf".encode('utf-8') - - renderer.decode_errors = "ignore" - self.assertEqual(renderer.unicode(b), "df") - - renderer.decode_errors = "replace" - # U+FFFD is the official Unicode replacement character. - self.assertEqual(renderer.unicode(b), u'd\ufffd\ufffdf') - - ## Test the _make_loader() method. - - def test__make_loader__return_type(self): - """ - Test that _make_loader() returns a Loader. - - """ - renderer = self._renderer() - loader = renderer._make_loader() - - self.assertEqual(type(loader), Loader) - - def test__make_loader__attributes(self): - """ - Test that _make_loader() sets all attributes correctly.. - - """ - unicode_ = lambda x: x - - renderer = self._renderer() - renderer.file_encoding = 'enc' - renderer.file_extension = 'ext' - renderer.unicode = unicode_ - - loader = renderer._make_loader() - - self.assertEqual(loader.extension, 'ext') - self.assertEqual(loader.file_encoding, 'enc') - self.assertEqual(loader.to_unicode, unicode_) - - ## Test the render() method. - - def test_render__return_type(self): - """ - Check that render() returns a string of type unicode. - - """ - renderer = self._renderer() - rendered = renderer.render('foo') - self.assertEqual(type(rendered), unicode) - - def test_render__unicode(self): - renderer = self._renderer() - actual = renderer.render(u'foo') - self.assertEqual(actual, u'foo') - - def test_render__str(self): - renderer = self._renderer() - actual = renderer.render('foo') - self.assertEqual(actual, 'foo') - - def test_render__non_ascii_character(self): - renderer = self._renderer() - actual = renderer.render(u'Poincaré') - self.assertEqual(actual, u'Poincaré') - - def test_render__context(self): - """ - Test render(): passing a context. - - """ - renderer = self._renderer() - self.assertEqual(renderer.render('Hi {{person}}', {'person': 'Mom'}), 'Hi Mom') - - def test_render__context_and_kwargs(self): - """ - Test render(): passing a context and **kwargs. 
- - """ - renderer = self._renderer() - template = 'Hi {{person1}} and {{person2}}' - self.assertEqual(renderer.render(template, {'person1': 'Mom'}, person2='Dad'), 'Hi Mom and Dad') - - def test_render__kwargs_and_no_context(self): - """ - Test render(): passing **kwargs and no context. - - """ - renderer = self._renderer() - self.assertEqual(renderer.render('Hi {{person}}', person='Mom'), 'Hi Mom') - - def test_render__context_and_kwargs__precedence(self): - """ - Test render(): **kwargs takes precedence over context. - - """ - renderer = self._renderer() - self.assertEqual(renderer.render('Hi {{person}}', {'person': 'Mom'}, person='Dad'), 'Hi Dad') - - def test_render__kwargs_does_not_modify_context(self): - """ - Test render(): passing **kwargs does not modify the passed context. - - """ - context = {} - renderer = self._renderer() - renderer.render('Hi {{person}}', context=context, foo="bar") - self.assertEqual(context, {}) - - def test_render__nonascii_template(self): - """ - Test passing a non-unicode template with non-ascii characters. - - """ - renderer = _make_renderer() - template = u"déf".encode("utf-8") - - # Check that decode_errors and string_encoding are both respected. - renderer.decode_errors = 'ignore' - renderer.string_encoding = 'ascii' - self.assertEqual(renderer.render(template), "df") - - renderer.string_encoding = 'utf_8' - self.assertEqual(renderer.render(template), u"déf") - - def test_make_resolve_partial(self): - """ - Test the _make_resolve_partial() method. - - """ - renderer = Renderer() - renderer.partials = {'foo': 'bar'} - resolve_partial = renderer._make_resolve_partial() - - actual = resolve_partial('foo') - self.assertEqual(actual, 'bar') - self.assertEqual(type(actual), unicode, "RenderEngine requires that " - "resolve_partial return unicode strings.") - - def test_make_resolve_partial__unicode(self): - """ - Test _make_resolve_partial(): that resolve_partial doesn't "double-decode" Unicode. - - """ - renderer = Renderer() - - renderer.partials = {'partial': 'foo'} - resolve_partial = renderer._make_resolve_partial() - self.assertEqual(resolve_partial("partial"), "foo") - - # Now with a value that is already unicode. - renderer.partials = {'partial': u'foo'} - resolve_partial = renderer._make_resolve_partial() - # If the next line failed, we would get the following error: - # TypeError: decoding Unicode is not supported - self.assertEqual(resolve_partial("partial"), "foo") - - def test_render_name(self): - """Test the render_name() method.""" - data_dir = get_data_path() - renderer = Renderer(search_dirs=data_dir) - actual = renderer.render_name("say_hello", to='foo') - self.assertString(actual, u"Hello, foo") - - def test_render_path(self): - """ - Test the render_path() method. - - """ - renderer = Renderer() - path = get_data_path('say_hello.mustache') - actual = renderer.render_path(path, to='foo') - self.assertEqual(actual, "Hello, foo") - - def test_render__object(self): - """ - Test rendering an object instance. - - """ - renderer = Renderer() - - say_hello = SayHello() - actual = renderer.render(say_hello) - self.assertEqual('Hello, World', actual) - - actual = renderer.render(say_hello, to='Mars') - self.assertEqual('Hello, Mars', actual) - - def test_render__template_spec(self): - """ - Test rendering a TemplateSpec instance. 
- - """ - renderer = Renderer() - - class Spec(TemplateSpec): - template = "hello, {{to}}" - to = 'world' - - spec = Spec() - actual = renderer.render(spec) - self.assertString(actual, u'hello, world') - - def test_render__view(self): - """ - Test rendering a View instance. - - """ - renderer = Renderer() - - view = Simple() - actual = renderer.render(view) - self.assertEqual('Hi pizza!', actual) - - def test_custom_string_coercion_via_assignment(self): - """ - Test that string coercion can be customized via attribute assignment. - - """ - renderer = self._renderer() - def to_str(val): - if not val: - return '' - else: - return str(val) - - self.assertEqual(renderer.render('{{value}}', value=None), 'None') - renderer.str_coerce = to_str - self.assertEqual(renderer.render('{{value}}', value=None), '') - - def test_custom_string_coercion_via_subclassing(self): - """ - Test that string coercion can be customized via subclassing. - - """ - class MyRenderer(Renderer): - def str_coerce(self, val): - if not val: - return '' - else: - return str(val) - renderer1 = Renderer() - renderer2 = MyRenderer() - - self.assertEqual(renderer1.render('{{value}}', value=None), 'None') - self.assertEqual(renderer2.render('{{value}}', value=None), '') - - -# By testing that Renderer.render() constructs the right RenderEngine, -# we no longer need to exercise all rendering code paths through -# the Renderer. It suffices to test rendering paths through the -# RenderEngine for the same amount of code coverage. -class Renderer_MakeRenderEngineTests(unittest.TestCase, AssertStringMixin, AssertExceptionMixin): - - """ - Check the RenderEngine returned by Renderer._make_render_engine(). - - """ - - def _make_renderer(self): - """ - Return a default Renderer instance for testing purposes. - - """ - return _make_renderer() - - ## Test the engine's resolve_partial attribute. - - def test__resolve_partial__returns_unicode(self): - """ - Check that resolve_partial returns unicode (and not a subclass). - - """ - class MyUnicode(unicode): - pass - - renderer = Renderer() - renderer.string_encoding = 'ascii' - renderer.partials = {'str': 'foo', 'subclass': MyUnicode('abc')} - - engine = renderer._make_render_engine() - - actual = engine.resolve_partial('str') - self.assertEqual(actual, "foo") - self.assertEqual(type(actual), unicode) - - # Check that unicode subclasses are not preserved. - actual = engine.resolve_partial('subclass') - self.assertEqual(actual, "abc") - self.assertEqual(type(actual), unicode) - - def test__resolve_partial__not_found(self): - """ - Check that resolve_partial returns the empty string when a template is not found. - - """ - renderer = Renderer() - - engine = renderer._make_render_engine() - resolve_partial = engine.resolve_partial - - self.assertString(resolve_partial('foo'), u'') - - def test__resolve_partial__not_found__missing_tags_strict(self): - """ - Check that resolve_partial provides a nice message when a template is not found. - - """ - renderer = Renderer() - renderer.missing_tags = 'strict' - - engine = renderer._make_render_engine() - resolve_partial = engine.resolve_partial - - self.assertException(TemplateNotFoundError, "File 'foo.mustache' not found in dirs: ['.']", - resolve_partial, "foo") - - def test__resolve_partial__not_found__partials_dict(self): - """ - Check that resolve_partial returns the empty string when a template is not found. 
- - """ - renderer = Renderer() - renderer.partials = {} - - engine = renderer._make_render_engine() - resolve_partial = engine.resolve_partial - - self.assertString(resolve_partial('foo'), u'') - - def test__resolve_partial__not_found__partials_dict__missing_tags_strict(self): - """ - Check that resolve_partial provides a nice message when a template is not found. - - """ - renderer = Renderer() - renderer.missing_tags = 'strict' - renderer.partials = {} - - engine = renderer._make_render_engine() - resolve_partial = engine.resolve_partial - - # Include dict directly since str(dict) is different in Python 2 and 3: - # versus , respectively. - self.assertException(TemplateNotFoundError, "Name 'foo' not found in partials: %s" % dict, - resolve_partial, "foo") - - ## Test the engine's literal attribute. - - def test__literal__uses_renderer_unicode(self): - """ - Test that literal uses the renderer's unicode function. - - """ - renderer = self._make_renderer() - renderer.unicode = mock_unicode - - engine = renderer._make_render_engine() - literal = engine.literal - - b = u"foo".encode("ascii") - self.assertEqual(literal(b), "FOO") - - def test__literal__handles_unicode(self): - """ - Test that literal doesn't try to "double decode" unicode. - - """ - renderer = Renderer() - renderer.string_encoding = 'ascii' - - engine = renderer._make_render_engine() - literal = engine.literal - - self.assertEqual(literal(u"foo"), "foo") - - def test__literal__returns_unicode(self): - """ - Test that literal returns unicode (and not a subclass). - - """ - renderer = Renderer() - renderer.string_encoding = 'ascii' - - engine = renderer._make_render_engine() - literal = engine.literal - - self.assertEqual(type(literal("foo")), unicode) - - class MyUnicode(unicode): - pass - - s = MyUnicode("abc") - - self.assertEqual(type(s), MyUnicode) - self.assertTrue(isinstance(s, unicode)) - self.assertEqual(type(literal(s)), unicode) - - ## Test the engine's escape attribute. - - def test__escape__uses_renderer_escape(self): - """ - Test that escape uses the renderer's escape function. - - """ - renderer = Renderer() - renderer.escape = lambda s: "**" + s - - engine = renderer._make_render_engine() - escape = engine.escape - - self.assertEqual(escape("foo"), "**foo") - - def test__escape__uses_renderer_unicode(self): - """ - Test that escape uses the renderer's unicode function. - - """ - renderer = Renderer() - renderer.unicode = mock_unicode - - engine = renderer._make_render_engine() - escape = engine.escape - - b = u"foo".encode('ascii') - self.assertEqual(escape(b), "FOO") - - def test__escape__has_access_to_original_unicode_subclass(self): - """ - Test that escape receives strings with the unicode subclass intact. - - """ - renderer = Renderer() - renderer.escape = lambda s: unicode(type(s).__name__) - - engine = renderer._make_render_engine() - escape = engine.escape - - class MyUnicode(unicode): - pass - - self.assertEqual(escape(u"foo".encode('ascii')), unicode.__name__) - self.assertEqual(escape(u"foo"), unicode.__name__) - self.assertEqual(escape(MyUnicode("foo")), MyUnicode.__name__) - - def test__escape__returns_unicode(self): - """ - Test that literal returns unicode (and not a subclass). - - """ - renderer = Renderer() - renderer.string_encoding = 'ascii' - - engine = renderer._make_render_engine() - escape = engine.escape - - self.assertEqual(type(escape("foo")), unicode) - - # Check that literal doesn't preserve unicode subclasses. 
- class MyUnicode(unicode): - pass - - s = MyUnicode("abc") - - self.assertEqual(type(s), MyUnicode) - self.assertTrue(isinstance(s, unicode)) - self.assertEqual(type(escape(s)), unicode) - - ## Test the missing_tags attribute. - - def test__missing_tags__unknown_value(self): - """ - Check missing_tags attribute: setting an unknown value. - - """ - renderer = Renderer() - renderer.missing_tags = 'foo' - - self.assertException(Exception, "Unsupported 'missing_tags' value: 'foo'", - renderer._make_render_engine) - - ## Test the engine's resolve_context attribute. - - def test__resolve_context(self): - """ - Check resolve_context(): default arguments. - - """ - renderer = Renderer() - - engine = renderer._make_render_engine() - - stack = ContextStack({'foo': 'bar'}) - - self.assertEqual('bar', engine.resolve_context(stack, 'foo')) - self.assertString(u'', engine.resolve_context(stack, 'missing')) - - def test__resolve_context__missing_tags_strict(self): - """ - Check resolve_context(): missing_tags 'strict'. - - """ - renderer = Renderer() - renderer.missing_tags = 'strict' - - engine = renderer._make_render_engine() - - stack = ContextStack({'foo': 'bar'}) - - self.assertEqual('bar', engine.resolve_context(stack, 'foo')) - self.assertException(KeyNotFoundError, "Key 'missing' not found: first part", - engine.resolve_context, stack, 'missing') diff --git a/third_party/python/pystache/pystache/tests/test_simple.py b/third_party/python/pystache/pystache/tests/test_simple.py deleted file mode 100644 index 07b059f50e6f..000000000000 --- a/third_party/python/pystache/pystache/tests/test_simple.py +++ /dev/null @@ -1,83 +0,0 @@ -import unittest - -import pystache -from pystache import Renderer -from examples.nested_context import NestedContext -from examples.complex import Complex -from examples.lambdas import Lambdas -from examples.template_partial import TemplatePartial -from examples.simple import Simple - -from pystache.tests.common import EXAMPLES_DIR -from pystache.tests.common import AssertStringMixin - - -class TestSimple(unittest.TestCase, AssertStringMixin): - - def test_nested_context(self): - renderer = Renderer() - view = NestedContext(renderer) - view.template = '{{#foo}}{{thing1}} and {{thing2}} and {{outer_thing}}{{/foo}}{{^foo}}Not foo!{{/foo}}' - - actual = renderer.render(view) - self.assertString(actual, u"one and foo and two") - - def test_looping_and_negation_context(self): - template = '{{#item}}{{header}}: {{name}} {{/item}}{{^item}} Shouldnt see me{{/item}}' - context = Complex() - - renderer = Renderer() - actual = renderer.render(template, context) - self.assertEqual(actual, "Colors: red Colors: green Colors: blue ") - - def test_empty_context(self): - template = '{{#empty_list}}Shouldnt see me {{/empty_list}}{{^empty_list}}Should see me{{/empty_list}}' - self.assertEqual(pystache.Renderer().render(template), "Should see me") - - def test_callables(self): - view = Lambdas() - view.template = '{{#replace_foo_with_bar}}foo != bar. oh, it does!{{/replace_foo_with_bar}}' - - renderer = Renderer() - actual = renderer.render(view) - self.assertString(actual, u'bar != bar. oh, it does!') - - def test_rendering_partial(self): - renderer = Renderer(search_dirs=EXAMPLES_DIR) - - view = TemplatePartial(renderer=renderer) - view.template = '{{>inner_partial}}' - - actual = renderer.render(view) - self.assertString(actual, u'Again, Welcome!') - - view.template = '{{#looping}}{{>inner_partial}} {{/looping}}' - actual = renderer.render(view) - self.assertString(actual, u"Again, Welcome! 
Again, Welcome! Again, Welcome! ") - - def test_non_existent_value_renders_blank(self): - view = Simple() - template = '{{not_set}} {{blank}}' - self.assertEqual(pystache.Renderer().render(template), ' ') - - - def test_template_partial_extension(self): - """ - Side note: - - From the spec-- - - Partial tags SHOULD be treated as standalone when appropriate. - - In particular, this means that trailing newlines should be removed. - - """ - renderer = Renderer(search_dirs=EXAMPLES_DIR, file_extension='txt') - - view = TemplatePartial(renderer=renderer) - - actual = renderer.render(view) - self.assertString(actual, u"""Welcome -------- - -## Again, Welcome! ##""") diff --git a/third_party/python/pystache/pystache/tests/test_specloader.py b/third_party/python/pystache/pystache/tests/test_specloader.py deleted file mode 100644 index cacc0fc954c6..000000000000 --- a/third_party/python/pystache/pystache/tests/test_specloader.py +++ /dev/null @@ -1,435 +0,0 @@ -# coding: utf-8 - -""" -Unit tests for template_spec.py. - -""" - -import os.path -import sys -import unittest - -import examples -from examples.simple import Simple -from examples.complex import Complex -from examples.lambdas import Lambdas -from examples.inverted import Inverted, InvertedLists -from pystache import Renderer -from pystache import TemplateSpec -from pystache.common import TemplateNotFoundError -from pystache.locator import Locator -from pystache.loader import Loader -from pystache.specloader import SpecLoader -from pystache.tests.common import DATA_DIR, EXAMPLES_DIR -from pystache.tests.common import AssertIsMixin, AssertStringMixin -from pystache.tests.data.views import SampleView -from pystache.tests.data.views import NonAscii - - -class Thing(object): - pass - - -class AssertPathsMixin: - - """A unittest.TestCase mixin to check path equality.""" - - def assertPaths(self, actual, expected): - self.assertEqual(actual, expected) - - -class ViewTestCase(unittest.TestCase, AssertStringMixin): - - def test_template_rel_directory(self): - """ - Test that View.template_rel_directory is respected. - - """ - class Tagless(TemplateSpec): - pass - - view = Tagless() - renderer = Renderer() - - self.assertRaises(TemplateNotFoundError, renderer.render, view) - - # TODO: change this test to remove the following brittle line. - view.template_rel_directory = "examples" - actual = renderer.render(view) - self.assertEqual(actual, "No tags...") - - def test_template_path_for_partials(self): - """ - Test that View.template_rel_path is respected for partials. - - """ - spec = TemplateSpec() - spec.template = "Partial: {{>tagless}}" - - renderer1 = Renderer() - renderer2 = Renderer(search_dirs=EXAMPLES_DIR) - - actual = renderer1.render(spec) - self.assertString(actual, u"Partial: ") - - actual = renderer2.render(spec) - self.assertEqual(actual, "Partial: No tags...") - - def test_basic_method_calls(self): - renderer = Renderer() - actual = renderer.render(Simple()) - - self.assertString(actual, u"Hi pizza!") - - def test_non_callable_attributes(self): - view = Simple() - view.thing = 'Chris' - - renderer = Renderer() - actual = renderer.render(view) - self.assertEqual(actual, "Hi Chris!") - - def test_complex(self): - renderer = Renderer() - actual = renderer.render(Complex()) - self.assertString(actual, u"""\ -

-<h1>Colors</h1>
-<ul>
-<li><strong>red</strong></li>
-<li><a href="#Green">green</a></li>
-<li><a href="#Blue">blue</a></li>
-</ul>
    -""") - - def test_higher_order_replace(self): - renderer = Renderer() - actual = renderer.render(Lambdas()) - self.assertEqual(actual, 'bar != bar. oh, it does!') - - def test_higher_order_rot13(self): - view = Lambdas() - view.template = '{{#rot13}}abcdefghijklm{{/rot13}}' - - renderer = Renderer() - actual = renderer.render(view) - self.assertString(actual, u'nopqrstuvwxyz') - - def test_higher_order_lambda(self): - view = Lambdas() - view.template = '{{#sort}}zyxwvutsrqponmlkjihgfedcba{{/sort}}' - - renderer = Renderer() - actual = renderer.render(view) - self.assertString(actual, u'abcdefghijklmnopqrstuvwxyz') - - def test_partials_with_lambda(self): - view = Lambdas() - view.template = '{{>partial_with_lambda}}' - - renderer = Renderer(search_dirs=EXAMPLES_DIR) - actual = renderer.render(view) - self.assertEqual(actual, u'nopqrstuvwxyz') - - def test_hierarchical_partials_with_lambdas(self): - view = Lambdas() - view.template = '{{>partial_with_partial_and_lambda}}' - - renderer = Renderer(search_dirs=EXAMPLES_DIR) - actual = renderer.render(view) - self.assertString(actual, u'nopqrstuvwxyznopqrstuvwxyz') - - def test_inverted(self): - renderer = Renderer() - actual = renderer.render(Inverted()) - self.assertString(actual, u"""one, two, three, empty list""") - - def test_accessing_properties_on_parent_object_from_child_objects(self): - parent = Thing() - parent.this = 'derp' - parent.children = [Thing()] - view = Simple() - view.template = "{{#parent}}{{#children}}{{this}}{{/children}}{{/parent}}" - - renderer = Renderer() - actual = renderer.render(view, {'parent': parent}) - - self.assertString(actual, u'derp') - - def test_inverted_lists(self): - renderer = Renderer() - actual = renderer.render(InvertedLists()) - self.assertString(actual, u"""one, two, three, empty list""") - - -def _make_specloader(): - """ - Return a default SpecLoader instance for testing purposes. - - """ - # Python 2 and 3 have different default encodings. Thus, to have - # consistent test results across both versions, we need to specify - # the string and file encodings explicitly rather than relying on - # the defaults. - def to_unicode(s, encoding=None): - """ - Raises a TypeError exception if the given string is already unicode. - - """ - if encoding is None: - encoding = 'ascii' - return unicode(s, encoding, 'strict') - - loader = Loader(file_encoding='ascii', to_unicode=to_unicode) - return SpecLoader(loader=loader) - - -class SpecLoaderTests(unittest.TestCase, AssertIsMixin, AssertStringMixin, - AssertPathsMixin): - - """ - Tests template_spec.SpecLoader. - - """ - - def _make_specloader(self): - return _make_specloader() - - def test_init__defaults(self): - spec_loader = SpecLoader() - - # Check the loader attribute. - loader = spec_loader.loader - self.assertEqual(loader.extension, 'mustache') - self.assertEqual(loader.file_encoding, sys.getdefaultencoding()) - # TODO: finish testing the other Loader attributes. - to_unicode = loader.to_unicode - - def test_init__loader(self): - loader = Loader() - custom = SpecLoader(loader=loader) - - self.assertIs(custom.loader, loader) - - # TODO: rename to something like _assert_load(). - def _assert_template(self, loader, custom, expected): - self.assertString(loader.load(custom), expected) - - def test_load__template__type_str(self): - """ - Test the template attribute: str string. 
- - """ - custom = TemplateSpec() - custom.template = "abc" - - spec_loader = self._make_specloader() - self._assert_template(spec_loader, custom, u"abc") - - def test_load__template__type_unicode(self): - """ - Test the template attribute: unicode string. - - """ - custom = TemplateSpec() - custom.template = u"abc" - - spec_loader = self._make_specloader() - self._assert_template(spec_loader, custom, u"abc") - - def test_load__template__unicode_non_ascii(self): - """ - Test the template attribute: non-ascii unicode string. - - """ - custom = TemplateSpec() - custom.template = u"é" - - spec_loader = self._make_specloader() - self._assert_template(spec_loader, custom, u"é") - - def test_load__template__with_template_encoding(self): - """ - Test the template attribute: with template encoding attribute. - - """ - custom = TemplateSpec() - custom.template = u'é'.encode('utf-8') - - spec_loader = self._make_specloader() - - self.assertRaises(UnicodeDecodeError, self._assert_template, spec_loader, custom, u'é') - - custom.template_encoding = 'utf-8' - self._assert_template(spec_loader, custom, u'é') - - # TODO: make this test complete. - def test_load__template__correct_loader(self): - """ - Test that reader.unicode() is called correctly. - - This test tests that the correct reader is called with the correct - arguments. This is a catch-all test to supplement the other - test cases. It tests SpecLoader.load() independent of reader.unicode() - being implemented correctly (and tested). - - """ - class MockLoader(Loader): - - def __init__(self): - self.s = None - self.encoding = None - - # Overrides the existing method. - def unicode(self, s, encoding=None): - self.s = s - self.encoding = encoding - return u"foo" - - loader = MockLoader() - custom_loader = SpecLoader() - custom_loader.loader = loader - - view = TemplateSpec() - view.template = "template-foo" - view.template_encoding = "encoding-foo" - - # Check that our unicode() above was called. - self._assert_template(custom_loader, view, u'foo') - self.assertEqual(loader.s, "template-foo") - self.assertEqual(loader.encoding, "encoding-foo") - - def test_find__template_path(self): - """Test _find() with TemplateSpec.template_path.""" - loader = self._make_specloader() - custom = TemplateSpec() - custom.template_path = "path/foo" - actual = loader._find(custom) - self.assertPaths(actual, "path/foo") - - -# TODO: migrate these tests into the SpecLoaderTests class. -# TODO: rename the get_template() tests to test load(). -# TODO: condense, reorganize, and rename the tests so that it is -# clear whether we have full test coverage (e.g. organized by -# TemplateSpec attributes or something). -class TemplateSpecTests(unittest.TestCase, AssertPathsMixin): - - def _make_loader(self): - return _make_specloader() - - def _assert_template_location(self, view, expected): - loader = self._make_loader() - actual = loader._find_relative(view) - self.assertEqual(actual, expected) - - def test_find_relative(self): - """ - Test _find_relative(): default behavior (no attributes set). - - """ - view = SampleView() - self._assert_template_location(view, (None, 'sample_view.mustache')) - - def test_find_relative__template_rel_path__file_name_only(self): - """ - Test _find_relative(): template_rel_path attribute. 
- - """ - view = SampleView() - view.template_rel_path = 'template.txt' - self._assert_template_location(view, ('', 'template.txt')) - - def test_find_relative__template_rel_path__file_name_with_directory(self): - """ - Test _find_relative(): template_rel_path attribute. - - """ - view = SampleView() - view.template_rel_path = 'foo/bar/template.txt' - self._assert_template_location(view, ('foo/bar', 'template.txt')) - - def test_find_relative__template_rel_directory(self): - """ - Test _find_relative(): template_rel_directory attribute. - - """ - view = SampleView() - view.template_rel_directory = 'foo' - - self._assert_template_location(view, ('foo', 'sample_view.mustache')) - - def test_find_relative__template_name(self): - """ - Test _find_relative(): template_name attribute. - - """ - view = SampleView() - view.template_name = 'new_name' - self._assert_template_location(view, (None, 'new_name.mustache')) - - def test_find_relative__template_extension(self): - """ - Test _find_relative(): template_extension attribute. - - """ - view = SampleView() - view.template_extension = 'txt' - self._assert_template_location(view, (None, 'sample_view.txt')) - - def test_find__with_directory(self): - """ - Test _find() with a view that has a directory specified. - - """ - loader = self._make_loader() - - view = SampleView() - view.template_rel_path = os.path.join('foo', 'bar.txt') - self.assertTrue(loader._find_relative(view)[0] is not None) - - actual = loader._find(view) - expected = os.path.join(DATA_DIR, 'foo', 'bar.txt') - - self.assertPaths(actual, expected) - - def test_find__without_directory(self): - """ - Test _find() with a view that doesn't have a directory specified. - - """ - loader = self._make_loader() - - view = SampleView() - self.assertTrue(loader._find_relative(view)[0] is None) - - actual = loader._find(view) - expected = os.path.join(DATA_DIR, 'sample_view.mustache') - - self.assertPaths(actual, expected) - - def _assert_get_template(self, custom, expected): - loader = self._make_loader() - actual = loader.load(custom) - - self.assertEqual(type(actual), unicode) - self.assertEqual(actual, expected) - - def test_get_template(self): - """ - Test get_template(): default behavior (no attributes set). - - """ - view = SampleView() - - self._assert_get_template(view, u"ascii: abc") - - def test_get_template__template_encoding(self): - """ - Test get_template(): template_encoding attribute. 
- - """ - view = NonAscii() - - self.assertRaises(UnicodeDecodeError, self._assert_get_template, view, 'foo') - - view.template_encoding = 'utf-8' - self._assert_get_template(view, u"non-ascii: é") diff --git a/third_party/python/requests-unixsocket/.travis.yml b/third_party/python/requests-unixsocket/.travis.yml new file mode 100644 index 000000000000..ec0465e2abcc --- /dev/null +++ b/third_party/python/requests-unixsocket/.travis.yml @@ -0,0 +1,17 @@ +language: python + +env: + - TOXENV=py26 + - TOXENV=py27 + - TOXENV=py33 + - TOXENV=py34 + - TOXENV=py35 + - TOXENV=pypy + - TOXENV=flake8 + - TOXENV=coverage + +install: + - travis_retry pip install tox + +script: + - tox diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/AUTHORS b/third_party/python/requests-unixsocket/AUTHORS similarity index 58% rename from third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/AUTHORS rename to third_party/python/requests-unixsocket/AUTHORS index 37da4b99b481..b9a817ac1687 100644 --- a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/AUTHORS +++ b/third_party/python/requests-unixsocket/AUTHORS @@ -1,11 +1,6 @@ -Aaron Gallagher <_@habnab.it> Ben Jackson -David Preece Esben Haabendal -Marc Abramowitz Marc Abramowitz -Marc Abramowitz -Ondřej Kobližel Tomaz Solc Will Rouesnel William Rouesnel diff --git a/third_party/python/requests-unixsocket/ChangeLog b/third_party/python/requests-unixsocket/ChangeLog new file mode 100644 index 000000000000..86e52f70e46c --- /dev/null +++ b/third_party/python/requests-unixsocket/ChangeLog @@ -0,0 +1,67 @@ +CHANGES +======= + +0.1.5 +----- + +* Fix test_unix_domain_adapter_connection_proxies_error +* .travis.yml tweaks +* Remove py32; Add py35 +* Only reject proxies if they are relevant (which should be never) +* Add urllib3 requirement +* Add basic tests for all supported methods +* More PEP8 compliance refactoring +* Fix up some oversights in method parsing +* Tweak a few things in PR 12 +* Make PEP8 compliant with autopep8 +* Improve the monkey-patching library to replicate requests more closely + +0.1.4 +----- + +* README.rst: Add PyPI badge +* Monkeypatch requests.request + +0.1.3 +----- + +* Fix #6 ("GET parameters stripped from URL") +* GH-7: Fallback to import from urllib3 + +0.1.2 +----- + +* Tweak monkeypatch code +* Move/expose testutils like UnixSocketServerThread +* Make monkeypatch url_scheme arg optional + +0.1.1 +----- + +* Remove :class: role from README.rst + +0.1.0 +----- + +* Doc tweaks +* Expose Session and monkeypatch +* Add Travis CI build badge +* Test Python 3.2 with tox and Travis CI +* Use threading.Event to less chance of race cond +* Add .travis.yml for Travis CI +* Change process => thread for test UnixSocketServer +* Make WSGIApp use server attribute for shutdown +* Use WSGIApp callable instead of closure +* In tests, try to gracefully kill waitress server +* Display text coverage report in tox coverage env +* Add test for proxies error +* Use b literal in test; fix py3 test failures +* tox.ini: Correct name of env pep8 => flake8 +* tox.ini: Rename pep8 => flake8 +* .gitignore: Add AUTHORS and ChangeLog +* Add pytest-pep8 +* Improve tests +* Yay, tests are passing +* .gitignore: Add .eggs/ for setuptools==7.0 +* Rename README.md -> README.rst +* Initial commit diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/LICENSE b/third_party/python/requests-unixsocket/LICENSE similarity index 100% rename from 
third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/LICENSE rename to third_party/python/requests-unixsocket/LICENSE diff --git a/third_party/python/requests-unixsocket/PKG-INFO b/third_party/python/requests-unixsocket/PKG-INFO new file mode 100644 index 000000000000..3c70b883469a --- /dev/null +++ b/third_party/python/requests-unixsocket/PKG-INFO @@ -0,0 +1,84 @@ +Metadata-Version: 1.1 +Name: requests-unixsocket +Version: 0.1.5 +Summary: Use requests to talk HTTP via a UNIX domain socket +Home-page: https://github.com/msabramo/requests-unixsocket +Author: Marc Abramowitz +Author-email: marc@marc-abramowitz.com +License: Apache-2 +Description: requests-unixsocket + =================== + + .. image:: https://pypip.in/version/requests-unixsocket/badge.svg?style=flat + :target: https://pypi.python.org/pypi/requests-unixsocket/ + :alt: Latest Version + + .. image:: https://travis-ci.org/msabramo/requests-unixsocket.svg?branch=master + :target: https://travis-ci.org/msabramo/requests-unixsocket + + Use `requests `_ to talk HTTP via a UNIX domain socket + + Usage + ----- + + Explicit + ++++++++ + + You can use it by instantiating a special ``Session`` object: + + .. code-block:: python + + import requests_unixsocket + + session = requests_unixsocket.Session() + + # Access /path/to/page from /tmp/profilesvc.sock + r = session.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page') + assert r.status_code == 200 + + Implicit (monkeypatching) + +++++++++++++++++++++++++ + + Monkeypatching allows you to use the functionality in this module, while making + minimal changes to your code. Note that in the above example we had to + instantiate a special ``requests_unixsocket.Session`` object and call the + ``get`` method on that object. Calling ``requests.get(url)`` (the easiest way + to use requests and probably very common), would not work. But we can make it + work by doing monkeypatching. + + You can monkeypatch globally: + + .. code-block:: python + + import requests_unixsocket + + requests_unixsocket.monkeypatch() + + # Access /path/to/page from /tmp/profilesvc.sock + r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page') + assert r.status_code == 200 + + or you can do it temporarily using a context manager: + + .. code-block:: python + + import requests_unixsocket + + with requests_unixsocket.monkeypatch(): + # Access /path/to/page from /tmp/profilesvc.sock + r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page') + assert r.status_code == 200 + + +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 diff --git a/third_party/python/requests-unixsocket/README.rst b/third_party/python/requests-unixsocket/README.rst new file mode 100644 index 000000000000..d582f4c059aa --- /dev/null +++ b/third_party/python/requests-unixsocket/README.rst @@ -0,0 +1,62 @@ +requests-unixsocket +=================== + +.. 
image:: https://pypip.in/version/requests-unixsocket/badge.svg?style=flat + :target: https://pypi.python.org/pypi/requests-unixsocket/ + :alt: Latest Version + +.. image:: https://travis-ci.org/msabramo/requests-unixsocket.svg?branch=master + :target: https://travis-ci.org/msabramo/requests-unixsocket + +Use `requests `_ to talk HTTP via a UNIX domain socket + +Usage +----- + +Explicit +++++++++ + +You can use it by instantiating a special ``Session`` object: + +.. code-block:: python + + import requests_unixsocket + + session = requests_unixsocket.Session() + + # Access /path/to/page from /tmp/profilesvc.sock + r = session.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page') + assert r.status_code == 200 + +Implicit (monkeypatching) ++++++++++++++++++++++++++ + +Monkeypatching allows you to use the functionality in this module, while making +minimal changes to your code. Note that in the above example we had to +instantiate a special ``requests_unixsocket.Session`` object and call the +``get`` method on that object. Calling ``requests.get(url)`` (the easiest way +to use requests and probably very common), would not work. But we can make it +work by doing monkeypatching. + +You can monkeypatch globally: + +.. code-block:: python + + import requests_unixsocket + + requests_unixsocket.monkeypatch() + + # Access /path/to/page from /tmp/profilesvc.sock + r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page') + assert r.status_code == 200 + +or you can do it temporarily using a context manager: + +.. code-block:: python + + import requests_unixsocket + + with requests_unixsocket.monkeypatch(): + # Access /path/to/page from /tmp/profilesvc.sock + r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page') + assert r.status_code == 200 diff --git a/third_party/python/requests-unixsocket/pytest.ini b/third_party/python/requests-unixsocket/pytest.ini new file mode 100644 index 000000000000..dd405b466915 --- /dev/null +++ b/third_party/python/requests-unixsocket/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --tb=short --pep8 diff --git a/third_party/python/requests_unixsocket/requests_unixsocket/__init__.py b/third_party/python/requests-unixsocket/requests_unixsocket/__init__.py similarity index 100% rename from third_party/python/requests_unixsocket/requests_unixsocket/__init__.py rename to third_party/python/requests-unixsocket/requests_unixsocket/__init__.py diff --git a/third_party/python/requests-unixsocket/requests_unixsocket/adapters.py b/third_party/python/requests-unixsocket/requests_unixsocket/adapters.py new file mode 100644 index 000000000000..8449b868c5c5 --- /dev/null +++ b/third_party/python/requests-unixsocket/requests_unixsocket/adapters.py @@ -0,0 +1,60 @@ +import socket + +from requests.adapters import HTTPAdapter +from requests.compat import urlparse, unquote +try: + from requests.packages.urllib3.connection import HTTPConnection + from requests.packages.urllib3.connectionpool import HTTPConnectionPool +except ImportError: + from urllib3.connection import HTTPConnection + from urllib3.connectionpool import HTTPConnectionPool + + +# The following was adapted from some code from docker-py +# https://github.com/docker/docker-py/blob/master/docker/unixconn/unixconn.py +class UnixHTTPConnection(HTTPConnection): + + def __init__(self, unix_socket_url, timeout=60): + """Create an HTTP connection to a unix domain socket + + :param unix_socket_url: A URL with a scheme of 'http+unix' and the + netloc is a percent-encoded path to a unix domain socket. 
E.g.: + 'http+unix://%2Ftmp%2Fprofilesvc.sock/status/pid' + """ + HTTPConnection.__init__(self, 'localhost', timeout=timeout) + self.unix_socket_url = unix_socket_url + self.timeout = timeout + + def connect(self): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.settimeout(self.timeout) + socket_path = unquote(urlparse(self.unix_socket_url).netloc) + sock.connect(socket_path) + self.sock = sock + + +class UnixHTTPConnectionPool(HTTPConnectionPool): + + def __init__(self, socket_path, timeout=60): + HTTPConnectionPool.__init__(self, 'localhost', timeout=timeout) + self.socket_path = socket_path + self.timeout = timeout + + def _new_conn(self): + return UnixHTTPConnection(self.socket_path, self.timeout) + + +class UnixAdapter(HTTPAdapter): + + def __init__(self, timeout=60): + super(UnixAdapter, self).__init__() + self.timeout = timeout + + def get_connection(self, socket_path, proxies=None): + proxies = proxies or {} + proxy = proxies.get(urlparse(socket_path.lower()).scheme) + + if proxy: + raise ValueError('%s does not support specifying proxies' + % self.__class__.__name__) + return UnixHTTPConnectionPool(socket_path, self.timeout) diff --git a/third_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py b/third_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py new file mode 100755 index 000000000000..34151b2b1823 --- /dev/null +++ b/third_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Tests for requests_unixsocket""" + +import logging + +import pytest +import requests + +import requests_unixsocket +from requests_unixsocket.testutils import UnixSocketServerThread + + +logger = logging.getLogger(__name__) + + +def test_unix_domain_adapter_ok(): + with UnixSocketServerThread() as usock_thread: + session = requests_unixsocket.Session('http+unix://') + urlencoded_usock = requests.compat.quote_plus(usock_thread.usock) + url = 'http+unix://%s/path/to/page' % urlencoded_usock + + for method in ['get', 'post', 'head', 'patch', 'put', 'delete', + 'options']: + logger.debug('Calling session.%s(%r) ...', method, url) + r = getattr(session, method)(url) + logger.debug( + 'Received response: %r with text: %r and headers: %r', + r, r.text, r.headers) + assert r.status_code == 200 + assert r.headers['server'] == 'waitress' + assert r.headers['X-Transport'] == 'unix domain socket' + assert r.headers['X-Requested-Path'] == '/path/to/page' + assert r.headers['X-Socket-Path'] == usock_thread.usock + assert isinstance(r.connection, requests_unixsocket.UnixAdapter) + assert r.url == url + if method == 'head': + assert r.text == '' + else: + assert r.text == 'Hello world!' 
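The test above drives each supported HTTP method through a throwaway UNIX-socket server; the one step worth calling out is how the socket path is percent-encoded into the URL's netloc via requests.compat.quote_plus. A minimal standalone sketch of that encoding, using only the standard library (the socket path is the hypothetical one from the package's own docs):

    from urllib.parse import quote_plus

    # Hypothetical socket location, borrowed from the package's examples.
    socket_path = "/tmp/profilesvc.sock"

    # quote_plus escapes the slashes as %2F so the whole path fits in the netloc.
    url = "http+unix://%s/path/to/page" % quote_plus(socket_path)
    print(url)  # -> http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page

On the way back, UnixHTTPConnection.connect() in adapters.py unquotes that netloc and passes the resulting filesystem path to sock.connect(), which is what makes the round trip work.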
+ + +def test_unix_domain_adapter_url_with_query_params(): + with UnixSocketServerThread() as usock_thread: + session = requests_unixsocket.Session('http+unix://') + urlencoded_usock = requests.compat.quote_plus(usock_thread.usock) + url = ('http+unix://%s' + '/containers/nginx/logs?timestamp=true' % urlencoded_usock) + + for method in ['get', 'post', 'head', 'patch', 'put', 'delete', + 'options']: + logger.debug('Calling session.%s(%r) ...', method, url) + r = getattr(session, method)(url) + logger.debug( + 'Received response: %r with text: %r and headers: %r', + r, r.text, r.headers) + assert r.status_code == 200 + assert r.headers['server'] == 'waitress' + assert r.headers['X-Transport'] == 'unix domain socket' + assert r.headers['X-Requested-Path'] == '/containers/nginx/logs' + assert r.headers['X-Requested-Query-String'] == 'timestamp=true' + assert r.headers['X-Socket-Path'] == usock_thread.usock + assert isinstance(r.connection, requests_unixsocket.UnixAdapter) + assert r.url == url + if method == 'head': + assert r.text == '' + else: + assert r.text == 'Hello world!' + + +def test_unix_domain_adapter_connection_error(): + session = requests_unixsocket.Session('http+unix://') + + for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: + with pytest.raises(requests.ConnectionError): + getattr(session, method)( + 'http+unix://socket_does_not_exist/path/to/page') + + +def test_unix_domain_adapter_connection_proxies_error(): + session = requests_unixsocket.Session('http+unix://') + + for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: + with pytest.raises(ValueError) as excinfo: + getattr(session, method)( + 'http+unix://socket_does_not_exist/path/to/page', + proxies={"http+unix": "http://10.10.1.10:1080"}) + assert ('UnixAdapter does not support specifying proxies' + in str(excinfo.value)) + + +def test_unix_domain_adapter_monkeypatch(): + with UnixSocketServerThread() as usock_thread: + with requests_unixsocket.monkeypatch('http+unix://'): + urlencoded_usock = requests.compat.quote_plus(usock_thread.usock) + url = 'http+unix://%s/path/to/page' % urlencoded_usock + + for method in ['get', 'post', 'head', 'patch', 'put', 'delete', + 'options']: + logger.debug('Calling session.%s(%r) ...', method, url) + r = getattr(requests, method)(url) + logger.debug( + 'Received response: %r with text: %r and headers: %r', + r, r.text, r.headers) + assert r.status_code == 200 + assert r.headers['server'] == 'waitress' + assert r.headers['X-Transport'] == 'unix domain socket' + assert r.headers['X-Requested-Path'] == '/path/to/page' + assert r.headers['X-Socket-Path'] == usock_thread.usock + assert isinstance(r.connection, + requests_unixsocket.UnixAdapter) + assert r.url == url + if method == 'head': + assert r.text == '' + else: + assert r.text == 'Hello world!' 
+ + for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: + with pytest.raises(requests.exceptions.InvalidSchema): + getattr(requests, method)(url) diff --git a/third_party/python/requests_unixsocket/requests_unixsocket/testutils.py b/third_party/python/requests-unixsocket/requests_unixsocket/testutils.py similarity index 100% rename from third_party/python/requests_unixsocket/requests_unixsocket/testutils.py rename to third_party/python/requests-unixsocket/requests_unixsocket/testutils.py diff --git a/third_party/python/requests-unixsocket/requirements.txt b/third_party/python/requests-unixsocket/requirements.txt new file mode 100644 index 000000000000..df0e6d638a06 --- /dev/null +++ b/third_party/python/requests-unixsocket/requirements.txt @@ -0,0 +1,2 @@ +requests>=1.1 +urllib3>=1.8 diff --git a/third_party/python/requests-unixsocket/setup.cfg b/third_party/python/requests-unixsocket/setup.cfg new file mode 100644 index 000000000000..1131013ee549 --- /dev/null +++ b/third_party/python/requests-unixsocket/setup.cfg @@ -0,0 +1,33 @@ +[metadata] +name = requests-unixsocket +author = Marc Abramowitz +author-email = marc@marc-abramowitz.com +summary = Use requests to talk HTTP via a UNIX domain socket +description-file = README.rst +license = Apache-2 +home-page = https://github.com/msabramo/requests-unixsocket +classifier = + Development Status :: 3 - Alpha + Intended Audience :: Developers + Intended Audience :: Information Technology + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 2.6 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.3 +test_suite = requests_unixsocket.tests + +[files] +packages = requests_unixsocket + +[wheel] +universal = 1 + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff --git a/third_party/python/requests-unixsocket/setup.py b/third_party/python/requests-unixsocket/setup.py new file mode 100755 index 000000000000..aa2d8a019471 --- /dev/null +++ b/third_party/python/requests-unixsocket/setup.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python + +from setuptools import setup + +setup( + setup_requires=['pbr'], + pbr=True, +) diff --git a/third_party/python/requests-unixsocket/test-requirements.txt b/third_party/python/requests-unixsocket/test-requirements.txt new file mode 100644 index 000000000000..a10e005013b4 --- /dev/null +++ b/third_party/python/requests-unixsocket/test-requirements.txt @@ -0,0 +1,4 @@ +pytest +pytest-capturelog +pytest-pep8 +waitress diff --git a/third_party/python/requests-unixsocket/tox.ini b/third_party/python/requests-unixsocket/tox.ini new file mode 100644 index 000000000000..d061cb879e7f --- /dev/null +++ b/third_party/python/requests-unixsocket/tox.ini @@ -0,0 +1,48 @@ +[tox] +envlist = py26, py27, py33, py34, py35, pypy, flake8 + +[testenv] +commands = py.test {posargs:requests_unixsocket/tests} +deps = + -r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +[testenv:flake8] +commands = flake8 +deps = + flake8 + {[testenv]deps} + +[testenv:venv] +commands = {posargs} + +[testenv:coverage] +commands = + coverage erase + coverage run --source requests_unixsocket -m py.test requests_unixsocket/tests + coverage report --show-missing + coverage html +deps = + coverage + {[testenv]deps} + +[testenv:doctest] +# note this only works under python 3 because of unicode literals +commands = 
+ python -m doctest README.rst + +[testenv:sphinx-doctest] +# note this only works under python 3 because of unicode literals +commands = + mkdir build/sphinx/doctest + sphinx-build -b doctest docs build/sphinx/doctest +deps = + pbr + {[testenv]deps} + +[testenv:docs] +commands = python setup.py build_sphinx + +[flake8] +max_line_length = 79 +exclude = .git,.tox,dist,docs,*egg diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/METADATA b/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/METADATA deleted file mode 100644 index 54234d6ab214..000000000000 --- a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/METADATA +++ /dev/null @@ -1,117 +0,0 @@ -Metadata-Version: 2.1 -Name: requests-unixsocket -Version: 0.2.0 -Summary: Use requests to talk HTTP via a UNIX domain socket -Home-page: https://github.com/msabramo/requests-unixsocket -Author: Marc Abramowitz -Author-email: marc@marc-abramowitz.com -License: Apache-2 -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Requires-Dist: requests (>=1.1) -Requires-Dist: urllib3 (>=1.8) - -requests-unixsocket -=================== - -.. image:: https://badge.fury.io/py/requests-unixsocket.svg - :target: https://badge.fury.io/py/requests-unixsocket - :alt: Latest Version on PyPI - -.. image:: https://travis-ci.org/msabramo/requests-unixsocket.svg?branch=master - :target: https://travis-ci.org/msabramo/requests-unixsocket - -Use `requests `_ to talk HTTP via a UNIX domain socket - -Usage ------ - -Explicit -++++++++ - -You can use it by instantiating a special ``Session`` object: - -.. code-block:: python - - import json - - import requests_unixsocket - - session = requests_unixsocket.Session() - - r = session.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/info') - registry_config = r.json()['RegistryConfig'] - print(json.dumps(registry_config, indent=4)) - - -Implicit (monkeypatching) -+++++++++++++++++++++++++ - -Monkeypatching allows you to use the functionality in this module, while making -minimal changes to your code. Note that in the above example we had to -instantiate a special ``requests_unixsocket.Session`` object and call the -``get`` method on that object. Calling ``requests.get(url)`` (the easiest way -to use requests and probably very common), would not work. But we can make it -work by doing monkeypatching. - -You can monkeypatch globally: - -.. code-block:: python - - import requests_unixsocket - - requests_unixsocket.monkeypatch() - - r = requests.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/info') - assert r.status_code == 200 - -or you can do it temporarily using a context manager: - -.. 
code-block:: python - - import requests_unixsocket - - with requests_unixsocket.monkeypatch(): - r = requests.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/info') - assert r.status_code == 200 - - -Abstract namespace sockets -++++++++++++++++++++++++++ - -To connect to an `abstract namespace -socket `_ -(Linux only), prefix the name with a NULL byte (i.e.: `\0`) - e.g.: - -.. code-block:: python - - import requests_unixsocket - - session = requests_unixsocket.Session() - res = session.get('http+unix://\0test_socket/get') - print(res.text) - -For an example program that illustrates this, see -``examples/abstract_namespace.py`` in the git repo. Since abstract namespace -sockets are specific to Linux, the program will only work on Linux. - - -See also --------- - -- https://github.com/httpie/httpie-unixsocket - a plugin for `HTTPie `_ that allows you to interact with UNIX domain sockets - - - diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/RECORD b/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/RECORD deleted file mode 100644 index f8fa773b835e..000000000000 --- a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -requests_unixsocket/__init__.py,sha256=_SzyTnexKzhCfmtiIRbrDpLNpKYJuf01QyyNHw76bF0,2077 -requests_unixsocket/adapters.py,sha256=UhXkEErWss-D90cehJltXqPVxwey4RjVkEj7wq0FFgs,2727 -requests_unixsocket/testutils.py,sha256=knU4P4lvwvogbEqRAJ-X77ojHD2V5rTDcYaLat_JreQ,3093 -requests_unixsocket/tests/test_requests_unixsocket.py,sha256=FIDUuM8ZPhpDpiHxvff8fyqCdbJsCDeUa8mKrtSBcSM,5196 -requests_unixsocket-0.2.0.dist-info/AUTHORS,sha256=CAloaNwgMbpQp1CeYjcT6FeDSqSgUZMppfV8FdeFSmM,420 -requests_unixsocket-0.2.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 -requests_unixsocket-0.2.0.dist-info/METADATA,sha256=wUV1Z9UnYmcs95HR7JMQrvwgxxh4lieBuSmGufX9BU4,3545 -requests_unixsocket-0.2.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -requests_unixsocket-0.2.0.dist-info/pbr.json,sha256=b-kcBU2vW_AOypwYSXDMuitwk2Wo4MC57Y9Pnx5JfZk,47 -requests_unixsocket-0.2.0.dist-info/top_level.txt,sha256=Y1EEbvkeC5k8NXwoNkaqjeDlx2oDGfUJrbEubbBbjcc,20 -requests_unixsocket-0.2.0.dist-info/RECORD,, diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/pbr.json b/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/pbr.json deleted file mode 100644 index 859fce145b2e..000000000000 --- a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/pbr.json +++ /dev/null @@ -1 +0,0 @@ -{"is_release": false, "git_version": "f4703e0"} \ No newline at end of file diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/top_level.txt b/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/top_level.txt deleted file mode 100644 index 412903bd0581..000000000000 --- a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -requests_unixsocket diff --git a/third_party/python/requests_unixsocket/requests_unixsocket/adapters.py b/third_party/python/requests_unixsocket/requests_unixsocket/adapters.py deleted file mode 100644 index a2c15642b14d..000000000000 --- a/third_party/python/requests_unixsocket/requests_unixsocket/adapters.py +++ /dev/null @@ -1,89 +0,0 @@ -import socket - -from requests.adapters import HTTPAdapter -from requests.compat import urlparse, unquote - 
-try: - import http.client as httplib -except ImportError: - import httplib - -try: - from requests.packages import urllib3 -except ImportError: - import urllib3 - - -# The following was adapted from some code from docker-py -# https://github.com/docker/docker-py/blob/master/docker/transport/unixconn.py -class UnixHTTPConnection(httplib.HTTPConnection, object): - - def __init__(self, unix_socket_url, timeout=60): - """Create an HTTP connection to a unix domain socket - - :param unix_socket_url: A URL with a scheme of 'http+unix' and the - netloc is a percent-encoded path to a unix domain socket. E.g.: - 'http+unix://%2Ftmp%2Fprofilesvc.sock/status/pid' - """ - super(UnixHTTPConnection, self).__init__('localhost', timeout=timeout) - self.unix_socket_url = unix_socket_url - self.timeout = timeout - self.sock = None - - def __del__(self): # base class does not have d'tor - if self.sock: - self.sock.close() - - def connect(self): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.settimeout(self.timeout) - socket_path = unquote(urlparse(self.unix_socket_url).netloc) - sock.connect(socket_path) - self.sock = sock - - -class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): - - def __init__(self, socket_path, timeout=60): - super(UnixHTTPConnectionPool, self).__init__( - 'localhost', timeout=timeout) - self.socket_path = socket_path - self.timeout = timeout - - def _new_conn(self): - return UnixHTTPConnection(self.socket_path, self.timeout) - - -class UnixAdapter(HTTPAdapter): - - def __init__(self, timeout=60, pool_connections=25): - super(UnixAdapter, self).__init__() - self.timeout = timeout - self.pools = urllib3._collections.RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) - super(UnixAdapter, self).__init__() - - def get_connection(self, url, proxies=None): - proxies = proxies or {} - proxy = proxies.get(urlparse(url.lower()).scheme) - - if proxy: - raise ValueError('%s does not support specifying proxies' - % self.__class__.__name__) - - with self.pools.lock: - pool = self.pools.get(url) - if pool: - return pool - - pool = UnixHTTPConnectionPool(url, self.timeout) - self.pools[url] = pool - - return pool - - def request_url(self, request, proxies): - return request.path_url - - def close(self): - self.pools.clear() diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in index dab1b4a90bf3..4ace3e3ad99f 100644 --- a/third_party/python/requirements.in +++ b/third_party/python/requirements.in @@ -1,41 +1,47 @@ +# ONLY ADD PACKAGES USED BY PYTHON 3 TO THIS LIST! +# +# Python 2-only packages should be vendored manually by running: +# +# $ pip download == +# +# Then for the package and each dependency: +# +# $ pip hash .whl # verify the hash against the one on PyPi (e.g https://pypi.org/project///#files) +# $ unzip .whl -d +# $ echo > /VERSION +# $ hg add /VERSION +# +# Note `pip download` may return `tar.gz` files if there is no `.whl` (wheel) +# available. When downloading wheels, make sure that they are cross-platform. +# If not you may need to specify `--no-binary :,:` to get +# the source distribution instead for those particular packages. 
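The comment block that now heads requirements.in describes a manual recipe for vendoring Python 2-only packages: pip download the pinned package, verify each wheel's hash against PyPI, unpack the wheel into a directory, write a VERSION file, and hg add it. A rough sketch of those steps as a script, assuming pip and hg are on PATH and that a wheel (not an sdist) was downloaded; the package name and version are placeholders, and the hash check remains a manual comparison against PyPI:

    import subprocess
    import zipfile
    from pathlib import Path

    name, version = "examplepkg", "1.0.0"   # hypothetical package to vendor

    # $ pip download <package>==<version>
    subprocess.run(["pip", "download", "%s==%s" % (name, version)], check=True)

    # Repeat the steps below for the package and each downloaded dependency.
    wheel = next(Path(".").glob("%s-%s-*.whl" % (name, version)))

    # $ pip hash <file>.whl   (compare the printed hash with the one shown on PyPI)
    subprocess.run(["pip", "hash", str(wheel)], check=True)

    # $ unzip <file>.whl -d <package>
    dest = Path(name)
    with zipfile.ZipFile(wheel) as wf:
        wf.extractall(dest)

    # $ echo <version> > <package>/VERSION && hg add <package>/VERSION
    (dest / "VERSION").write_text(version + "\n")
    subprocess.run(["hg", "add", str(dest / "VERSION")], check=True)

As the note above warns, pip may hand back a tar.gz when no cross-platform wheel exists, in which case the unpack step changes and --no-binary may be needed to force the source distribution.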
+ attrs==19.1.0 blessings==1.7 -cbor2==4.0.1 compare-locales==8.1.0 cookies==2.2.1 coverage==5.1 -cram==0.7 distro==1.4.0 ecdsa==0.15 esprima==4.0.1 fluent.migrate==0.11 fluent.syntax==0.18.1 -funcsigs==1.0.2 glean_parser==3.6.0 jsmin==2.1.0 json-e==2.7.0 -mohawk==0.3.4 mozilla-version==0.3.4 pathspec==0.8 pip-tools==5.5.0 ply==3.10 pyasn1==0.4.8 -pyasn1-modules==0.2.8 -pylru==1.0.9 -pystache==0.5.4 pytest==3.6.2 python-hglib==2.4 pytoml==0.1.10 pyyaml==5.4.1 redo==2.0.3 requests==2.25.1 -requests-unixsocket==0.2.0 responses==0.10.6 -rsa==3.1.4 sentry-sdk==0.14.3 six==1.13.0 -slugid==1.0.7 -taskcluster==6.0.0 -taskcluster-urls==11.0.0 voluptuous==0.12.1 yamllint==1.23 diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt index 616721a4fd79..aeaabadfb3e4 100644 --- a/third_party/python/requirements.txt +++ b/third_party/python/requirements.txt @@ -1,52 +1,7 @@ -aiohttp==3.7.4.post0 \ - --hash=sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe \ - --hash=sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe \ - --hash=sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5 \ - --hash=sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8 \ - --hash=sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd \ - --hash=sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb \ - --hash=sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c \ - --hash=sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87 \ - --hash=sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0 \ - --hash=sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290 \ - --hash=sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5 \ - --hash=sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287 \ - --hash=sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde \ - --hash=sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf \ - --hash=sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8 \ - --hash=sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16 \ - --hash=sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf \ - --hash=sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809 \ - --hash=sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213 \ - --hash=sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f \ - --hash=sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013 \ - --hash=sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b \ - --hash=sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9 \ - --hash=sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5 \ - --hash=sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb \ - --hash=sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df \ - --hash=sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4 \ - --hash=sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439 \ - --hash=sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f \ - --hash=sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22 \ - --hash=sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f \ - 
--hash=sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5 \ - --hash=sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970 \ - --hash=sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009 \ - --hash=sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc \ - --hash=sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a \ - --hash=sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95 - # via taskcluster appdirs==1.4.4 \ --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 # via glean-parser -async-timeout==3.0.1 \ - --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ - --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 - # via - # aiohttp - # taskcluster atomicwrites==1.1.5 \ --hash=sha256:240831ea22da9ab882b551b31d4225591e5e447a68c5e188db5b89ca1d487585 \ --hash=sha256:a24da68318b08ac9c9c45029f4a10371ab5b20e4226738e150e6e7c571630ae6 @@ -56,7 +11,6 @@ attrs==19.1.0 \ --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 # via # -r requirements-mach-vendor-python.in - # aiohttp # jsonschema # mozilla-version # pytest @@ -65,10 +19,6 @@ blessings==1.7 \ --hash=sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3 \ --hash=sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e # via -r requirements-mach-vendor-python.in -cbor2==4.0.1 \ - --hash=sha256:b0eb916c9ea226aa81e9091607737475d5b0e5c314fe8d5a87179fba449cd190 \ - --hash=sha256:cee0d01e520563b5a73c72eace5c428bb68aefb1b3f7aee5d692d3af6a1e5172 - # via -r requirements-mach-vendor-python.in certifi==2018.4.16 \ --hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7 \ --hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0 @@ -78,9 +28,7 @@ certifi==2018.4.16 \ chardet==4.0.0 \ --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 - # via - # aiohttp - # requests + # via requests click==7.0 \ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 @@ -130,10 +78,6 @@ coverage==5.1 \ --hash=sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489 \ --hash=sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052 # via -r requirements-mach-vendor-python.in -cram==0.7 \ - --hash=sha256:008e4e8b4d325cf040964b5f62460535b004a7bc816d54f8527a4d299edfe4a3 \ - --hash=sha256:7da7445af2ce15b90aad5ec4792f857cef5786d71f14377e9eb994d8b8337f2f - # via -r requirements-mach-vendor-python.in diskcache==4.1.0 \ --hash=sha256:69b253a6ffe95bb4bafb483b97c24fca3c2c6c47b82e92b36486969a7e80d47d \ --hash=sha256:bcee5a59f9c264e2809e58d01be6569a3bbb1e36a1e0fb83f7ef9b2075f95ce0 @@ -159,24 +103,14 @@ fluent.syntax==0.18.1 \ # -r requirements-mach-vendor-python.in # compare-locales # fluent.migrate -funcsigs==1.0.2 \ - --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \ - --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 - # via -r requirements-mach-vendor-python.in glean_parser==3.6.0 \ --hash=sha256:1da46764d93ec563eb93235bd477814444742d1ee6b5231dc6684cebf46a6057 \ 
--hash=sha256:655a2038ec0be5364f351f592d5476fe7b028ebb0d9fd75f07c6c54287859552 # via -r requirements-mach-vendor-python.in -idna-ssl==1.1.0 \ - --hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c - # via aiohttp idna==2.10 \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 - # via - # idna-ssl - # requests - # yarl + # via requests importlib-metadata==3.10.1 \ --hash=sha256:2ec0faae539743ae6aaa84b49a169670a465f7f5d64e6add98388cc29fd1f2f6 \ --hash=sha256:c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1 @@ -234,12 +168,6 @@ markupsafe==1.1.1 \ --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \ --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be # via jinja2 -mohawk==0.3.4 \ - --hash=sha256:b3f85ffa93a5c7d2f9cc591246ef9f8ac4a9fa716bfd5bae0377699a2d89d78c \ - --hash=sha256:e98b331d9fa9ece7b8be26094cbe2d57613ae882133cc755167268a984bc0ab3 - # via - # -r requirements-mach-vendor-python.in - # taskcluster more-itertools==4.3.0 \ --hash=sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092 \ --hash=sha256:c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e \ @@ -249,47 +177,6 @@ mozilla-version==0.3.4 \ --hash=sha256:3ed4deb7a6fb25c83a5346ef4de08ddff9b2ddc4d16dd8fafb4a84978cc71255 \ --hash=sha256:ce5741c2e7d12c30b53de9f79e30d6ac2a8bd4c93be711d30c7a7a08e32a094f # via -r requirements-mach-vendor-python.in -multidict==5.1.0 \ - --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ - --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ - --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ - --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ - --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ - --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ - --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ - --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ - --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ - --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ - --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ - --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ - --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ - --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ - --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ - --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ - --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ - --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ - --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ - --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ - --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ - --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ - --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ - 
--hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ - --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ - --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ - --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ - --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ - --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ - --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ - --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ - --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ - --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ - --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ - --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ - --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ - --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 - # via - # aiohttp - # yarl pathspec==0.8 \ --hash=sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0 \ --hash=sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061 @@ -312,26 +199,13 @@ py==1.10.0 \ --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a # via pytest -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 - # via -r requirements-mach-vendor-python.in pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba - # via - # -r requirements-mach-vendor-python.in - # pyasn1-modules - # rsa -pylru==1.0.9 \ - --hash=sha256:71376192671f0ad1690b2a7427d39a29b1df994c8469a9b46b03ed7e28c0172c # via -r requirements-mach-vendor-python.in pyrsistent==0.16.0 \ --hash=sha256:28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3 # via jsonschema -pystache==0.5.4 \ - --hash=sha256:f7bbc265fb957b4d6c7c042b336563179444ab313fb93a719759111eabd3b85a - # via -r requirements-mach-vendor-python.in pytest==3.6.2 \ --hash=sha256:8ea01fc4fcc8e1b1e305252b4bc80a1528019ab99fd3b88666c9dc38d754406c \ --hash=sha256:90898786b3d0b880b47645bae7b51aa9bbf1e9d1e4510c2cfd15dd65c70ea0cd @@ -382,25 +256,16 @@ redo==2.0.3 \ --hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \ --hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8 # via -r requirements-mach-vendor-python.in -requests-unixsocket==0.2.0 \ - --hash=sha256:014d07bfb66dc805a011a8b4b306cf4ec96d2eddb589f6b2b5765e626f0dc0cc \ - --hash=sha256:9e5c1a20afc3cf786197ae59c79bcdb0e7565f218f27df5f891307ee8817c1ea - # via -r requirements-mach-vendor-python.in requests==2.25.1 \ --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 \ --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e # via # -r requirements-mach-vendor-python.in - # requests-unixsocket # responses - # taskcluster responses==0.10.6 \ --hash=sha256:502d9c0c8008439cfcdef7e251f507fcfdd503b56e8c0c87c3c3e3393953f790 \ 
--hash=sha256:97193c0183d63fba8cd3a041c75464e4b09ea0aff6328800d1546598567dde0b # via -r requirements-mach-vendor-python.in -rsa==3.1.4 \ - --hash=sha256:e2b0b05936c276b1edd2e1525553233b666df9e29b5c3ba223eed738277c82a0 - # via -r requirements-mach-vendor-python.in sentry-sdk==0.14.3 \ --hash=sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f \ --hash=sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950 @@ -415,43 +280,20 @@ six==1.13.0 \ # ecdsa # fluent.migrate # jsonschema - # mohawk # more-itertools # pyrsistent # pytest # responses - # taskcluster -slugid==1.0.7 \ - --hash=sha256:6dab3c7eef0bb423fb54cb7752e0f466ddd0ee495b78b763be60e8a27f69e779 - # via - # -r requirements-mach-vendor-python.in - # taskcluster -taskcluster-urls==11.0.0 \ - --hash=sha256:18dcaa9c2412d34ff6c78faca33f0dd8f2384e3f00a98d5832c62d6d664741f0 \ - --hash=sha256:2aceab7cf5b1948bc197f2e5e50c371aa48181ccd490b8bada00f1e3baf0c5cc \ - --hash=sha256:74bd2110b5daaebcec5e1d287bf137b61cb8cf6b2d8f5f2b74183e32bc4e7c87 - # via - # -r requirements-mach-vendor-python.in - # taskcluster -taskcluster==6.0.0 \ - --hash=sha256:48ecd4898c7928deddfb34cb1cfe2b2505c68416e6c503f8a7f3dd0572425e96 \ - --hash=sha256:6d5cf7bdbc09dc48b2d376b418b95c1c157a2d359c4b6b231c1fb14a323c0cc5 \ - --hash=sha256:e409fce7a72808e4f87dc7baca7a79d8b64d5c5045264b9e197c120cc40e219b - # via -r requirements-mach-vendor-python.in typing-extensions==3.7.4.3 \ --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f - # via - # aiohttp - # importlib-metadata - # yarl + # via importlib-metadata urllib3==1.25.9 \ --hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \ --hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115 # via # requests - # requests-unixsocket # sentry-sdk voluptuous==0.12.1 \ --hash=sha256:663572419281ddfaf4b4197fd4942d181630120fb39b333e3adad70aeb56444b \ @@ -463,45 +305,6 @@ yamllint==1.23 \ # via # -r requirements-mach-vendor-python.in # glean-parser -yarl==1.6.3 \ - --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ - --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ - --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ - --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ - --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ - --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ - --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ - --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ - --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ - --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ - --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ - --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ - --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ - --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ - --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ - --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ - 
--hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ - --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ - --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ - --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ - --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ - --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ - --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ - --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ - --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ - --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ - --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ - --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ - --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ - --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ - --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ - --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ - --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ - --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ - --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ - --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ - --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 - # via aiohttp zipp==3.4.1 \ --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/py.typed b/third_party/python/rsa/tests/__init__.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/py.typed rename to third_party/python/rsa/tests/__init__.py diff --git a/third_party/python/rsa/tests/constants.py b/third_party/python/rsa/tests/constants.py new file mode 100644 index 000000000000..6a0d08183689 --- /dev/null +++ b/third_party/python/rsa/tests/constants.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- + +from rsa._compat import have_python3 + +if have_python3: + from py3kconstants import * +else: + from py2kconstants import * + diff --git a/third_party/python/rsa/tests/py2kconstants.py b/third_party/python/rsa/tests/py2kconstants.py new file mode 100644 index 000000000000..5f695dd22792 --- /dev/null +++ b/third_party/python/rsa/tests/py2kconstants.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +unicode_string = u"Euro=\u20ac ABCDEFGHIJKLMNOPQRSTUVWXYZ" diff --git a/third_party/python/rsa/tests/py3kconstants.py b/third_party/python/rsa/tests/py3kconstants.py new file mode 100644 index 000000000000..83b67129c9bc --- /dev/null +++ b/third_party/python/rsa/tests/py3kconstants.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +unicode_string = "Euro=\u20ac ABCDEFGHIJKLMNOPQRSTUVWXYZ" diff --git a/third_party/python/rsa/tests/test_bigfile.py b/third_party/python/rsa/tests/test_bigfile.py new file mode 100644 index 000000000000..86bcbbac6f4d --- /dev/null +++ 
b/third_party/python/rsa/tests/test_bigfile.py @@ -0,0 +1,60 @@ +'''Tests block operations.''' +from rsa._compat import b + +try: + from StringIO import StringIO as BytesIO +except ImportError: + from io import BytesIO +import unittest2 + +import rsa +from rsa import bigfile, varblock, pkcs1 + +class BigfileTest(unittest2.TestCase): + + def test_encrypt_decrypt_bigfile(self): + + # Expected block size + 11 bytes padding + pub_key, priv_key = rsa.newkeys((6 + 11) * 8) + + # Encrypt the file + message = b('123456Sybren') + infile = BytesIO(message) + outfile = BytesIO() + + bigfile.encrypt_bigfile(infile, outfile, pub_key) + + # Test + crypto = outfile.getvalue() + + cryptfile = BytesIO(crypto) + clearfile = BytesIO() + + bigfile.decrypt_bigfile(cryptfile, clearfile, priv_key) + self.assertEquals(clearfile.getvalue(), message) + + # We have 2x6 bytes in the message, so that should result in two + # bigfile. + cryptfile.seek(0) + varblocks = list(varblock.yield_varblocks(cryptfile)) + self.assertEqual(2, len(varblocks)) + + + def test_sign_verify_bigfile(self): + + # Large enough to store MD5-sum and ASN.1 code for MD5 + pub_key, priv_key = rsa.newkeys((34 + 11) * 8) + + # Sign the file + msgfile = BytesIO(b('123456Sybren')) + signature = pkcs1.sign(msgfile, priv_key, 'MD5') + + # Check the signature + msgfile.seek(0) + self.assertTrue(pkcs1.verify(msgfile, signature, pub_key)) + + # Alter the message, re-check + msgfile = BytesIO(b('123456sybren')) + self.assertRaises(pkcs1.VerificationError, + pkcs1.verify, msgfile, signature, pub_key) + diff --git a/third_party/python/rsa/tests/test_common.py b/third_party/python/rsa/tests/test_common.py new file mode 100644 index 000000000000..d105dc020f15 --- /dev/null +++ b/third_party/python/rsa/tests/test_common.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import unittest2 +import struct +from rsa._compat import byte, b +from rsa.common import byte_size, bit_size, _bit_size + + +class Test_byte(unittest2.TestCase): + def test_values(self): + self.assertEqual(byte(0), b('\x00')) + self.assertEqual(byte(255), b('\xff')) + + def test_struct_error_when_out_of_bounds(self): + self.assertRaises(struct.error, byte, 256) + self.assertRaises(struct.error, byte, -1) + +class Test_byte_size(unittest2.TestCase): + def test_values(self): + self.assertEqual(byte_size(1 << 1023), 128) + self.assertEqual(byte_size((1 << 1024) - 1), 128) + self.assertEqual(byte_size(1 << 1024), 129) + self.assertEqual(byte_size(255), 1) + self.assertEqual(byte_size(256), 2) + self.assertEqual(byte_size(0xffff), 2) + self.assertEqual(byte_size(0xffffff), 3) + self.assertEqual(byte_size(0xffffffff), 4) + self.assertEqual(byte_size(0xffffffffff), 5) + self.assertEqual(byte_size(0xffffffffffff), 6) + self.assertEqual(byte_size(0xffffffffffffff), 7) + self.assertEqual(byte_size(0xffffffffffffffff), 8) + + def test_zero(self): + self.assertEqual(byte_size(0), 1) + + def test_bad_type(self): + self.assertRaises(TypeError, byte_size, []) + self.assertRaises(TypeError, byte_size, ()) + self.assertRaises(TypeError, byte_size, dict()) + self.assertRaises(TypeError, byte_size, "") + self.assertRaises(TypeError, byte_size, None) + +class Test_bit_size(unittest2.TestCase): + def test_zero(self): + self.assertEqual(bit_size(0), 0) + + def test_values(self): + self.assertEqual(bit_size(1023), 10) + self.assertEqual(bit_size(1024), 11) + self.assertEqual(bit_size(1025), 11) + self.assertEqual(bit_size(1 << 1024), 1025) + self.assertEqual(bit_size((1 << 1024) + 1), 1025) + 
self.assertEqual(bit_size((1 << 1024) - 1), 1024) + + self.assertEqual(_bit_size(1023), 10) + self.assertEqual(_bit_size(1024), 11) + self.assertEqual(_bit_size(1025), 11) + self.assertEqual(_bit_size(1 << 1024), 1025) + self.assertEqual(_bit_size((1 << 1024) + 1), 1025) + self.assertEqual(_bit_size((1 << 1024) - 1), 1024) diff --git a/third_party/python/rsa/tests/test_compat.py b/third_party/python/rsa/tests/test_compat.py new file mode 100644 index 000000000000..3652c82d5167 --- /dev/null +++ b/third_party/python/rsa/tests/test_compat.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +import unittest2 +import struct + +from rsa._compat import is_bytes, byte + +class Test_byte(unittest2.TestCase): + def test_byte(self): + for i in range(256): + byt = byte(i) + self.assertTrue(is_bytes(byt)) + self.assertEqual(ord(byt), i) + + def test_raises_StructError_on_overflow(self): + self.assertRaises(struct.error, byte, 256) + self.assertRaises(struct.error, byte, -1) diff --git a/third_party/python/rsa/tests/test_integers.py b/third_party/python/rsa/tests/test_integers.py new file mode 100644 index 000000000000..0a712aa0fc15 --- /dev/null +++ b/third_party/python/rsa/tests/test_integers.py @@ -0,0 +1,36 @@ +'''Tests integer operations.''' + +import unittest2 + +import rsa.core + +class IntegerTest(unittest2.TestCase): + + def setUp(self): + (self.pub, self.priv) = rsa.newkeys(64) + + def test_enc_dec(self): + + message = 42 + print("\tMessage: %d" % message) + + encrypted = rsa.core.encrypt_int(message, self.pub.e, self.pub.n) + print("\tEncrypted: %d" % encrypted) + + decrypted = rsa.core.decrypt_int(encrypted, self.priv.d, self.pub.n) + print("\tDecrypted: %d" % decrypted) + + self.assertEqual(message, decrypted) + + def test_sign_verify(self): + + message = 42 + + signed = rsa.core.encrypt_int(message,self.priv.d, self.pub.n) + print("\tSigned: %d" % signed) + + verified = rsa.core.decrypt_int(signed, self.pub.e,self.pub.n) + print("\tVerified: %d" % verified) + + self.assertEqual(message, verified) + diff --git a/third_party/python/rsa/tests/test_load_save_keys.py b/third_party/python/rsa/tests/test_load_save_keys.py new file mode 100644 index 000000000000..fc1a1aaae70e --- /dev/null +++ b/third_party/python/rsa/tests/test_load_save_keys.py @@ -0,0 +1,127 @@ +'''Unittest for saving and loading keys.''' + +import base64 +import unittest2 +from rsa._compat import b + +import rsa.key + +B64PRIV_DER = b('MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt') +PRIVATE_DER = base64.decodestring(B64PRIV_DER) + +B64PUB_DER = b('MAwCBQDeKYlRAgMBAAE=') +PUBLIC_DER = base64.decodestring(B64PUB_DER) + +PRIVATE_PEM = b(''' +-----BEGIN CONFUSING STUFF----- +Cruft before the key + +-----BEGIN RSA PRIVATE KEY----- +Comment: something blah + +%s +-----END RSA PRIVATE KEY----- + +Stuff after the key +-----END CONFUSING STUFF----- +''' % B64PRIV_DER.decode("utf-8")) + +CLEAN_PRIVATE_PEM = b('''\ +-----BEGIN RSA PRIVATE KEY----- +%s +-----END RSA PRIVATE KEY----- +''' % B64PRIV_DER.decode("utf-8")) + +PUBLIC_PEM = b(''' +-----BEGIN CONFUSING STUFF----- +Cruft before the key + +-----BEGIN RSA PUBLIC KEY----- +Comment: something blah + +%s +-----END RSA PUBLIC KEY----- + +Stuff after the key +-----END CONFUSING STUFF----- +''' % B64PUB_DER.decode("utf-8")) + +CLEAN_PUBLIC_PEM = b('''\ +-----BEGIN RSA PUBLIC KEY----- +%s +-----END RSA PUBLIC KEY----- +''' % B64PUB_DER.decode("utf-8")) + + +class DerTest(unittest2.TestCase): + '''Test saving and loading DER keys.''' + + def test_load_private_key(self): + 
'''Test loading private DER keys.''' + + key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_DER, 'DER') + expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + + self.assertEqual(expected, key) + + def test_save_private_key(self): + '''Test saving private DER keys.''' + + key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + der = key.save_pkcs1('DER') + + self.assertEqual(PRIVATE_DER, der) + + def test_load_public_key(self): + '''Test loading public DER keys.''' + + key = rsa.key.PublicKey.load_pkcs1(PUBLIC_DER, 'DER') + expected = rsa.key.PublicKey(3727264081, 65537) + + self.assertEqual(expected, key) + + def test_save_public_key(self): + '''Test saving public DER keys.''' + + key = rsa.key.PublicKey(3727264081, 65537) + der = key.save_pkcs1('DER') + + self.assertEqual(PUBLIC_DER, der) + +class PemTest(unittest2.TestCase): + '''Test saving and loading PEM keys.''' + + + def test_load_private_key(self): + '''Test loading private PEM files.''' + + key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_PEM, 'PEM') + expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + + self.assertEqual(expected, key) + + def test_save_private_key(self): + '''Test saving private PEM files.''' + + key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + pem = key.save_pkcs1('PEM') + + self.assertEqual(CLEAN_PRIVATE_PEM, pem) + + def test_load_public_key(self): + '''Test loading public PEM files.''' + + key = rsa.key.PublicKey.load_pkcs1(PUBLIC_PEM, 'PEM') + expected = rsa.key.PublicKey(3727264081, 65537) + + self.assertEqual(expected, key) + + def test_save_public_key(self): + '''Test saving public PEM files.''' + + key = rsa.key.PublicKey(3727264081, 65537) + pem = key.save_pkcs1('PEM') + + self.assertEqual(CLEAN_PUBLIC_PEM, pem) + + diff --git a/third_party/python/rsa/tests/test_pem.py b/third_party/python/rsa/tests/test_pem.py new file mode 100644 index 000000000000..867f678a0e07 --- /dev/null +++ b/third_party/python/rsa/tests/test_pem.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import unittest2 +from rsa._compat import b +from rsa.pem import _markers + + +class Test__markers(unittest2.TestCase): + def test_values(self): + self.assertEqual(_markers('RSA PRIVATE KEY'), + (b('-----BEGIN RSA PRIVATE KEY-----'), + b('-----END RSA PRIVATE KEY-----'))) diff --git a/third_party/python/rsa/tests/test_pkcs1.py b/third_party/python/rsa/tests/test_pkcs1.py new file mode 100644 index 000000000000..d5882dfd1b4a --- /dev/null +++ b/third_party/python/rsa/tests/test_pkcs1.py @@ -0,0 +1,94 @@ +'''Tests string operations.''' + +import struct +import unittest2 + +import rsa +from rsa import pkcs1 +from rsa._compat import byte, is_integer, b, is_bytes + +class BinaryTest(unittest2.TestCase): + + def setUp(self): + (self.pub, self.priv) = rsa.newkeys(256) + + def test_enc_dec(self): + + message = struct.pack('>IIII', 0, 0, 0, 1) + print("\tMessage: %r" % message) + + encrypted = pkcs1.encrypt(message, self.pub) + print("\tEncrypted: %r" % encrypted) + + decrypted = pkcs1.decrypt(encrypted, self.priv) + print("\tDecrypted: %r" % decrypted) + + self.assertEqual(message, decrypted) + + def test_decoding_failure(self): + + message = struct.pack('>IIII', 0, 0, 0, 1) + encrypted = pkcs1.encrypt(message, self.pub) + + # Alter the encrypted stream + a = encrypted[5] + if is_bytes(a): + a = ord(a) + encrypted = encrypted[:5] + byte(a + 1) + encrypted[6:] + + self.assertRaises(pkcs1.DecryptionError, pkcs1.decrypt, encrypted, + self.priv) + + 
def test_randomness(self): + '''Encrypting the same message twice should result in different + cryptos. + ''' + + message = struct.pack('>IIII', 0, 0, 0, 1) + encrypted1 = pkcs1.encrypt(message, self.pub) + encrypted2 = pkcs1.encrypt(message, self.pub) + + self.assertNotEqual(encrypted1, encrypted2) + +class SignatureTest(unittest2.TestCase): + + def setUp(self): + (self.pub, self.priv) = rsa.newkeys(512) + + def test_sign_verify(self): + '''Test happy flow of sign and verify''' + + message = b('je moeder') + print("\tMessage: %r" % message) + + signature = pkcs1.sign(message, self.priv, 'SHA-256') + print("\tSignature: %r" % signature) + + self.assertTrue(pkcs1.verify(message, signature, self.pub)) + + def test_alter_message(self): + '''Altering the message should let the verification fail.''' + + signature = pkcs1.sign(b('je moeder'), self.priv, 'SHA-256') + self.assertRaises(pkcs1.VerificationError, pkcs1.verify, + b('mijn moeder'), signature, self.pub) + + def test_sign_different_key(self): + '''Signing with another key should let the verification fail.''' + + (otherpub, _) = rsa.newkeys(512) + + message = b('je moeder') + signature = pkcs1.sign(message, self.priv, 'SHA-256') + self.assertRaises(pkcs1.VerificationError, pkcs1.verify, + message, signature, otherpub) + + def test_multiple_signings(self): + '''Signing the same message twice should return the same signatures.''' + + message = struct.pack('>IIII', 0, 0, 0, 1) + signature1 = pkcs1.sign(message, self.priv, 'SHA-1') + signature2 = pkcs1.sign(message, self.priv, 'SHA-1') + + self.assertEqual(signature1, signature2) + diff --git a/third_party/python/rsa/tests/test_strings.py b/third_party/python/rsa/tests/test_strings.py new file mode 100644 index 000000000000..4af06291d417 --- /dev/null +++ b/third_party/python/rsa/tests/test_strings.py @@ -0,0 +1,28 @@ +'''Tests string operations.''' + +from __future__ import absolute_import + +import unittest2 + +import rsa + +from constants import unicode_string + +class StringTest(unittest2.TestCase): + + def setUp(self): + (self.pub, self.priv) = rsa.newkeys(384) + + def test_enc_dec(self): + + message = unicode_string.encode('utf-8') + print("\tMessage: %s" % message) + + encrypted = rsa.encrypt(message, self.pub) + print("\tEncrypted: %s" % encrypted) + + decrypted = rsa.decrypt(encrypted, self.priv) + print("\tDecrypted: %s" % decrypted) + + self.assertEqual(message, decrypted) + diff --git a/third_party/python/rsa/tests/test_transform.py b/third_party/python/rsa/tests/test_transform.py new file mode 100644 index 000000000000..ffd9ec892a81 --- /dev/null +++ b/third_party/python/rsa/tests/test_transform.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- + + +import unittest2 +from rsa._compat import b +from rsa.transform import int2bytes, bytes2int, _int2bytes + + +class Test_int2bytes(unittest2.TestCase): + def test_accuracy(self): + self.assertEqual(int2bytes(123456789), b('\x07[\xcd\x15')) + self.assertEqual(_int2bytes(123456789), b('\x07[\xcd\x15')) + + def test_codec_identity(self): + self.assertEqual(bytes2int(int2bytes(123456789, 128)), 123456789) + self.assertEqual(bytes2int(_int2bytes(123456789, 128)), 123456789) + + def test_chunk_size(self): + self.assertEqual(int2bytes(123456789, 6), b('\x00\x00\x07[\xcd\x15')) + self.assertEqual(int2bytes(123456789, 7), + b('\x00\x00\x00\x07[\xcd\x15')) + + self.assertEqual(_int2bytes(123456789, 6), + b('\x00\x00\x07[\xcd\x15')) + self.assertEqual(_int2bytes(123456789, 7), + b('\x00\x00\x00\x07[\xcd\x15')) + + def test_zero(self): + 
self.assertEqual(int2bytes(0, 4), b('\x00') * 4) + self.assertEqual(int2bytes(0, 7), b('\x00') * 7) + self.assertEqual(int2bytes(0), b('\x00')) + + self.assertEqual(_int2bytes(0, 4), b('\x00') * 4) + self.assertEqual(_int2bytes(0, 7), b('\x00') * 7) + self.assertEqual(_int2bytes(0), b('\x00')) + + def test_correctness_against_base_implementation(self): + # Slow test. + values = [ + 1 << 512, + 1 << 8192, + 1 << 77, + ] + for value in values: + self.assertEqual(int2bytes(value), _int2bytes(value), + "Boom %d" % value) + self.assertEqual(bytes2int(int2bytes(value)), + value, + "Boom %d" % value) + self.assertEqual(bytes2int(_int2bytes(value)), + value, + "Boom %d" % value) + + def test_raises_OverflowError_when_chunk_size_is_insufficient(self): + self.assertRaises(OverflowError, int2bytes, 123456789, 3) + self.assertRaises(OverflowError, int2bytes, 299999999999, 4) + + self.assertRaises(OverflowError, _int2bytes, 123456789, 3) + self.assertRaises(OverflowError, _int2bytes, 299999999999, 4) + + def test_raises_ValueError_when_negative_integer(self): + self.assertRaises(ValueError, int2bytes, -1) + self.assertRaises(ValueError, _int2bytes, -1) + + def test_raises_TypeError_when_not_integer(self): + self.assertRaises(TypeError, int2bytes, None) + self.assertRaises(TypeError, _int2bytes, None) diff --git a/third_party/python/rsa/tests/test_varblock.py b/third_party/python/rsa/tests/test_varblock.py new file mode 100644 index 000000000000..24ea50f1f6d5 --- /dev/null +++ b/third_party/python/rsa/tests/test_varblock.py @@ -0,0 +1,82 @@ +'''Tests varblock operations.''' + + +try: + from StringIO import StringIO as BytesIO +except ImportError: + from io import BytesIO +import unittest + +import rsa +from rsa._compat import b +from rsa import varblock + +class VarintTest(unittest.TestCase): + + def test_read_varint(self): + + encoded = b('\xac\x02crummy') + infile = BytesIO(encoded) + + (decoded, read) = varblock.read_varint(infile) + + # Test the returned values + self.assertEqual(300, decoded) + self.assertEqual(2, read) + + # The rest of the file should be untouched + self.assertEqual(b('crummy'), infile.read()) + + def test_read_zero(self): + + encoded = b('\x00crummy') + infile = BytesIO(encoded) + + (decoded, read) = varblock.read_varint(infile) + + # Test the returned values + self.assertEqual(0, decoded) + self.assertEqual(1, read) + + # The rest of the file should be untouched + self.assertEqual(b('crummy'), infile.read()) + + def test_write_varint(self): + + expected = b('\xac\x02') + outfile = BytesIO() + + written = varblock.write_varint(outfile, 300) + + # Test the returned values + self.assertEqual(expected, outfile.getvalue()) + self.assertEqual(2, written) + + + def test_write_zero(self): + + outfile = BytesIO() + written = varblock.write_varint(outfile, 0) + + # Test the returned values + self.assertEqual(b('\x00'), outfile.getvalue()) + self.assertEqual(1, written) + + +class VarblockTest(unittest.TestCase): + + def test_yield_varblock(self): + infile = BytesIO(b('\x01\x0512345\x06Sybren')) + + varblocks = list(varblock.yield_varblocks(infile)) + self.assertEqual([b('12345'), b('Sybren')], varblocks) + +class FixedblockTest(unittest.TestCase): + + def test_yield_fixedblock(self): + + infile = BytesIO(b('123456Sybren')) + + fixedblocks = list(varblock.yield_fixedblocks(infile, 6)) + self.assertEqual([b('123456'), b('Sybren')], fixedblocks) + diff --git a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/LICENSE b/third_party/python/taskcluster-urls/LICENSE similarity index 
100% rename from third_party/python/taskcluster/taskcluster-6.0.0.dist-info/LICENSE rename to third_party/python/taskcluster-urls/LICENSE diff --git a/third_party/python/taskcluster-urls/MANIFEST.in b/third_party/python/taskcluster-urls/MANIFEST.in new file mode 100644 index 000000000000..2451f52823c3 --- /dev/null +++ b/third_party/python/taskcluster-urls/MANIFEST.in @@ -0,0 +1,4 @@ +include LICENSE +global-exclude *.py[co] +include specification.yml +include package.json diff --git a/third_party/python/taskcluster-urls/PKG-INFO b/third_party/python/taskcluster-urls/PKG-INFO new file mode 100644 index 000000000000..b22650dafd8d --- /dev/null +++ b/third_party/python/taskcluster-urls/PKG-INFO @@ -0,0 +1,253 @@ +Metadata-Version: 2.1 +Name: taskcluster-urls +Version: 11.0.0 +Summary: Standardized url generator for taskcluster resources. +Home-page: https://github.com/taskcluster/taskcluster-lib-urls +Author: Brian Stack +Author-email: bstack@mozilla.com +License: MPL2 +Description: # Taskcluster URL Building Library + + [![License](https://img.shields.io/badge/license-MPL%202.0-orange.svg)](http://mozilla.org/MPL/2.0) + + A simple library to generate URLs for various Taskcluster resources across our various deployment methods. + + This serves as both a simple shim for projects that use JavaScript but also is the reference implementation for + how we define these paths. + + URLs are defined in the 'Taskcluster URL Format' document. + + Changelog + --------- + View the changelog on the [releases page](https://github.com/taskcluster/taskcluster-lib-urls/releases). + + Requirements + ------------ + + This is tested on and should run on any of Node.js `{8, 10}`. + + JS Usage + -------- + [![Node.js Build Status](https://travis-ci.org/taskcluster/taskcluster-lib-urls.svg?branch=master)](https://travis-ci.org/taskcluster/taskcluster-lib-urls) + [![npm](https://img.shields.io/npm/v/taskcluster-lib-urls.svg?maxAge=2592000)](https://www.npmjs.com/package/taskcluster-lib-urls) + + This package exports several methods for generating URLs conditionally based on + a root URL, as well as a few helper classes for generating URLs for a pre-determined + root URL: + + * `api(rootUrl, service, version, path)` -> `String` + * `apiReference(rootUrl, service, version)` -> `String` + * `docs(rootUrl, path)` -> `String` + * `exchangeReference(rootUrl, service, version)` -> `String` + * `schema(rootUrl, service, schema)` -> `String` + * `ui(rootUrl, path)` -> `String` + * `servicesManifest(rootUrl)` -> `String` + * `testRootUrl()` -> `String` + * `withRootUrl(rootUrl)` -> `Class` instance for above methods + + When the `rootUrl` is `https://taskcluster.net`, the generated URLs will be to the Heroku cluster. Otherwise they will follow the + [spec defined in this project](https://github.com/taskcluster/taskcluster-lib-urls/tree/master/docs/urls-spec.md). + + `testRootUrl()` is used to share a common fake `rootUrl` between various Taskcluster mocks in testing. + The URL does not resolve. 
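
The vendored Python module exposes the same helpers (see the Python Usage section further down). As a minimal sketch of how a test might combine the fake root URL with the URL builders, using only the functions already listed in this document:

```python
# Minimal sketch: build URLs against the shared fake test root URL.
# Only helpers documented in the "Python Usage" section below are used.
import taskcluster_urls

root_url = taskcluster_urls.test_root_url()  # fake, non-resolving root URL

api_url = taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar')
schema_url = taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml')

print(api_url)
print(schema_url)
```
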
+ + ```js + // Specifying root URL every time: + const libUrls = require('taskcluster-lib-urls'); + + libUrls.api(rootUrl, 'auth', 'v1', 'foo/bar'); + libUrls.schema(rootUrl, 'auth', 'v1/foo.yml'); // Note that schema names have versions in them + libUrls.apiReference(rootUrl, 'auth', 'v1'); + libUrls.exchangeReference(rootUrl, 'auth', 'v1'); + libUrls.ui(rootUrl, 'foo/bar'); + libUrls.servicesManifest(rootUrl); + libUrls.docs(rootUrl, 'foo/bar'); + ``` + + ```js + // Specifying root URL in advance: + const libUrls = require('taskcluster-lib-urls'); + + const urls = libUrls.withRoot(rootUrl); + + urls.api('auth', 'v1', 'foo/bar'); + urls.schema('auth', 'v1/foo.yml'); + urls.apiReference('auth', 'v1'); + urls.exchangeReference('auth', 'v1'); + urls.ui('foo/bar'); + urls.servicesManifest(); + urls.docs('foo/bar'); + ``` + + If you would like, you can set this up via [taskcluster-lib-loader](https://github.com/taskcluster/taskcluster-lib-loader) as follows: + + ```js + { + libUrlss: { + require: ['cfg'], + setup: ({cfg}) => withRootUrl(cfg.rootURl), + }, + } + ``` + + Test with: + + ``` + yarn install + yarn test + ``` + + + Go Usage + -------- + + [![GoDoc](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls?status.svg)](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls) + + The go package exports the following functions: + + ```go + func API(rootURL string, service string, version string, path string) string + func APIReference(rootURL string, service string, version string) string + func Docs(rootURL string, path string) string + func ExchangeReference(rootURL string, service string, version string) string + func Schema(rootURL string, service string, name string) string + func UI(rootURL string, path string) string + func ServicesManifest(rootURL string) string + ``` + + Install with: + + ``` + go install ./.. + ``` + + Test with: + + ``` + go test -v ./... + ``` + + Python Usage + ------------ + + You can install the python client with `pip install taskcluster-urls`; + + ```python + import taskcluster_urls + + taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar') + taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml') # Note that schema names have versions in them + taskcluster_urls.api_reference(root_url, 'auth', 'v1') + taskcluster_urls.exchange_reference(root_url, 'auth', 'v1') + taskcluster_urls.ui(root_url, 'foo/bar') + taskcluster_urls.servicesManifest(root_url) + taskcluster_urls.docs(root_url, 'foo/bar') + + And for testing, + ```python + taskcluster_urls.test_root_url() + ``` + + Test with: + + ``` + tox + ``` + + Java Usage + ---------- + + [![JavaDoc](https://img.shields.io/badge/javadoc-reference-blue.svg)](http://taskcluster.github.io/taskcluster-lib-urls/apidocs) + + In order to use this library from your maven project, simply include it as a project dependency: + + ``` + + ... + + ... + + org.mozilla.taskcluster + taskcluster-lib-urls + 1.0.0 + + + + ``` + + The taskcluster-lib-urls artifacts are now available from the [maven central repository](http://central.sonatype.org/): + + * [Search Results](http://search.maven.org/#search|gav|1|g%3A%22org.mozilla.taskcluster%22%20AND%20a%3A%22taskcluster-lib-urls%22) + * [Directory Listing](https://repo1.maven.org/maven2/org/mozilla/taskcluster/taskcluster-lib-urls/) + + To use the library, do as follows: + + ```java + import org.mozilla.taskcluster.urls.*; + + ... 
+ + URLProvider urlProvider = URLs.provider("https://mytaskcluster.acme.org"); + + String fooBarAPI = urlProvider.api("auth", "v1", "foo/bar"); + String fooSchema = urlProvider.schema("auth", "v1/foo.yml"); // Note that schema names have versions in them + String authAPIRef = urlProvider.apiReference("auth", "v1"); + String authExchangesRef = urlProvider.exchangeReference("auth", "v1"); + String uiFooBar = urlProvider.ui("foo/bar"); + String servicesManifest = urlProvider.servicesManifest(); + String docsFooBar = urlProvider.docs("foo/bar"); + + ... + ``` + + Install with: + + ``` + mvn install + ``` + + Test with: + + ``` + mvn test + ``` + + + Releasing + --------- + + New releases should be tested on Travis and Taskcluster to allow for all supported versions of various languages to be tested. Once satisfied that it works, new versions should be created with + `npm version` rather than by manually editing `package.json` and tags should be pushed to Github. + + Make the Node release first, as Python's version depends on its `package.json`. This follows the typical tag-and-push-to-publish approach: + + ```sh + $ npm version minor # or patch, or major + $ git push upstream + ``` + + Once that's done, build the Python sdists (only possible by the [maintainers on pypi](https://pypi.org/project/taskcluster-urls/#files)): + + ```sh + rm -rf dist/* + python setup.py sdist bdist_wheel + python3 setup.py bdist_wheel + pip install twine + twine upload dist/* + ``` + + Make sure to update [the changelog](https://github.com/taskcluster/taskcluster-lib-urls/releases)! + + License + ------- + + [Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE) + +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Description-Content-Type: text/markdown diff --git a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/METADATA b/third_party/python/taskcluster-urls/README.md similarity index 91% rename from third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/METADATA rename to third_party/python/taskcluster-urls/README.md index e881042576d3..46a6d835f364 100644 --- a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/METADATA +++ b/third_party/python/taskcluster-urls/README.md @@ -1,20 +1,3 @@ -Metadata-Version: 2.1 -Name: taskcluster-urls -Version: 11.0.0 -Summary: Standardized url generator for taskcluster resources. 
-Home-page: https://github.com/taskcluster/taskcluster-lib-urls -Author: Brian Stack -Author-email: bstack@mozilla.com -License: MPL2 -Platform: UNKNOWN -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Description-Content-Type: text/markdown - # Taskcluster URL Building Library [![License](https://img.shields.io/badge/license-MPL%202.0-orange.svg)](http://mozilla.org/MPL/2.0) @@ -251,5 +234,3 @@ License ------- [Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE) - - diff --git a/third_party/python/taskcluster-urls/package.json b/third_party/python/taskcluster-urls/package.json new file mode 100644 index 000000000000..66829aac4ba6 --- /dev/null +++ b/third_party/python/taskcluster-urls/package.json @@ -0,0 +1,25 @@ +{ + "name": "taskcluster-lib-urls", + "version": "11.0.0", + "author": "Brian Stack ", + "description": "Build urls for taskcluster resources.", + "license": "MPL-2.0", + "scripts": { + "lint": "eslint src/*.js test/*.js", + "pretest": "yarn lint", + "test": "mocha test/*_test.js" + }, + "files": [ + "src" + ], + "repository": { + "type": "git", + "url": "https://github.com/taskcluster/taskcluster-lib-urls.git" + }, + "main": "./src/index.js", + "devDependencies": { + "eslint-config-taskcluster": "^3.1.0", + "js-yaml": "^3.11.0", + "mocha": "^5.1.1" + } +} diff --git a/third_party/python/taskcluster-urls/setup.cfg b/third_party/python/taskcluster-urls/setup.cfg new file mode 100644 index 000000000000..3f5dd53607c5 --- /dev/null +++ b/third_party/python/taskcluster-urls/setup.cfg @@ -0,0 +1,7 @@ +[tools:pytest] +flake8-max-line-length = 120 + +[egg_info] +tag_build = +tag_date = 0 + diff --git a/third_party/python/taskcluster-urls/setup.py b/third_party/python/taskcluster-urls/setup.py new file mode 100644 index 000000000000..f60108151b68 --- /dev/null +++ b/third_party/python/taskcluster-urls/setup.py @@ -0,0 +1,28 @@ +import json +import os +from setuptools import setup + +package_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'package.json') +with open(package_json) as f: + version = json.load(f)['version'] + +setup( + name='taskcluster-urls', + description='Standardized url generator for taskcluster resources.', + long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(), + long_description_content_type='text/markdown', + url='https://github.com/taskcluster/taskcluster-lib-urls', + version=version, + packages=['taskcluster_urls'], + author='Brian Stack', + author_email='bstack@mozilla.com', + license='MPL2', + classifiers=[ + 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + ], +) diff --git a/third_party/python/taskcluster_urls/taskcluster_urls/__init__.py b/third_party/python/taskcluster-urls/taskcluster_urls/__init__.py similarity index 100% rename from third_party/python/taskcluster_urls/taskcluster_urls/__init__.py rename to third_party/python/taskcluster-urls/taskcluster_urls/__init__.py diff --git a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/METADATA 
b/third_party/python/taskcluster/PKG-INFO similarity index 55% rename from third_party/python/taskcluster/taskcluster-6.0.0.dist-info/METADATA rename to third_party/python/taskcluster/PKG-INFO index f7ba70ffa5ff..9a20850cdcaa 100644 --- a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/METADATA +++ b/third_party/python/taskcluster/PKG-INFO @@ -1,4 +1,4 @@ -Metadata-Version: 2.1 +Metadata-Version: 1.1 Name: taskcluster Version: 6.0.0 Summary: Python client for Taskcluster @@ -6,18 +6,8 @@ Home-page: https://github.com/taskcluster/taskcluster-client.py Author: John Ford Author-email: jhford@mozilla.com License: UNKNOWN +Description: UNKNOWN Platform: UNKNOWN Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 -Requires-Dist: requests (<3,>=2.4.3) -Requires-Dist: mohawk (<0.4,>=0.3.4) -Requires-Dist: slugid (<2,>=1.0.7) -Requires-Dist: taskcluster-urls (<12,>=10.1.0) -Requires-Dist: six (<2,>=1.10.0) -Requires-Dist: aiohttp (<4,>=2.0.0) -Requires-Dist: async-timeout (<4,>=2.0.0) - -UNKNOWN - - diff --git a/third_party/python/taskcluster/README.md b/third_party/python/taskcluster/README.md new file mode 100644 index 000000000000..9a0cee7b4a5b --- /dev/null +++ b/third_party/python/taskcluster/README.md @@ -0,0 +1,4383 @@ +Taskcluster Client Library in Python +====================================== + +[![Build Status](https://travis-ci.org/taskcluster/taskcluster-client.py.svg?branch=master)](https://travis-ci.org/taskcluster/taskcluster-client.py) + +This is a library used to interact with Taskcluster within Python programs. It +presents the entire REST API to consumers as well as being able to generate +URLs Signed by Hawk credentials. It can also generate routing keys for +listening to pulse messages from Taskcluster. + +The library builds the REST API methods from the same [API Reference +format](/docs/manual/design/apis/reference-format) as the +Javascript client library. + +## Generating Temporary Credentials +If you have non-temporary taskcluster credentials you can generate a set of +temporary credentials as follows. Notice that the credentials cannot last more +than 31 days, and you can only revoke them by revoking the credentials that was +used to issue them (this takes up to one hour). + +It is not the responsibility of the caller to apply any clock drift adjustment +to the start or expiry time - this is handled by the auth service directly. + +```python +import datetime + +start = datetime.datetime.now() +expiry = start + datetime.timedelta(0,60) +scopes = ['ScopeA', 'ScopeB'] +name = 'foo' + +credentials = taskcluster.createTemporaryCredentials( + # issuing clientId + clientId, + # issuing accessToken + accessToken, + # Validity of temporary credentials starts here, in timestamp + start, + # Expiration of temporary credentials, in timestamp + expiry, + # Scopes to grant the temporary credentials + scopes, + # credential name (optional) + name +) +``` + +You cannot use temporary credentials to issue new temporary credentials. You +must have `auth:create-client:` to create a named temporary credential, +but unnamed temporary credentials can be created regardless of your scopes. + +## API Documentation + +The REST API methods are documented in the [reference docs](/docs/reference). + +## Query-String arguments +Query string arguments are now supported. 
In order to use them, you can call +a method like this: + +```python +queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g', query={'continuationToken': outcome.get('continuationToken')}) +``` + +These query-string arguments are only supported using this calling convention + +## Sync vs Async + +The objects under `taskcluster` (e.g., `taskcluster.Queue`) are +python2-compatible and operate synchronously. + + +The objects under `taskcluster.aio` (e.g., `taskcluster.aio.Queue`) require +`python>=3.6`. The async objects use asyncio coroutines for concurrency; this +allows us to put I/O operations in the background, so operations that require +the cpu can happen sooner. Given dozens of operations that can run concurrently +(e.g., cancelling a medium-to-large task graph), this can result in significant +performance improvements. The code would look something like + +```python +#!/usr/bin/env python +import aiohttp +import asyncio +from taskcluster.aio import Auth + +async def do_ping(): + with aiohttp.ClientSession() as session: + a = Auth(session=session) + print(await a.ping()) + +loop = asyncio.get_event_loop() +loop.run_until_complete(do_ping()) +``` + +Other async code examples are available [here](#methods-contained-in-the-client-library). + +Here's a slide deck for an [introduction to async python](https://gitpitch.com/escapewindow/slides-sf-2017/async-python). + +## Usage + +* Here's a simple command: + + ```python + import taskcluster + index = taskcluster.Index({ + 'rootUrl': 'https://tc.example.com', + 'credentials': {'clientId': 'id', 'accessToken': 'accessToken'}, + }) + index.ping() + ``` + +* There are four calling conventions for methods: + + ```python + client.method(v1, v1, payload) + client.method(payload, k1=v1, k2=v2) + client.method(payload=payload, query=query, params={k1: v1, k2: v2}) + client.method(v1, v2, payload=payload, query=query) + ``` + +* Options for the topic exchange methods can be in the form of either a single + dictionary argument or keyword arguments. Only one form is allowed + + ```python + from taskcluster import client + qEvt = client.QueueEvents({rootUrl: 'https://tc.example.com'}) + # The following calls are equivalent + qEvt.taskCompleted({'taskId': 'atask'}) + qEvt.taskCompleted(taskId='atask') + ``` + +## Root URL + +This client requires a `rootUrl` argument to identify the Taskcluster +deployment to talk to. As of this writing, the production cluster has rootUrl +`https://taskcluster.net`. + +## Environment Variables + +As of version 6.0.0, the client does not read the standard `TASKCLUSTER_…` +environment variables automatically. To fetch their values explicitly, use +`taskcluster.optionsFromEnvironment()`: + +```python +auth = taskcluster.Auth(taskcluster.optionsFromEnvironment()) +``` + +## Pagination +There are two ways to accomplish pagination easily with the python client. 
The first is +to implement pagination in your code: +```python +import taskcluster +queue = taskcluster.Queue({'rootUrl': 'https://tc.example.com'}) +i = 0 +tasks = 0 +outcome = queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g') +while outcome.get('continuationToken'): + print('Response %d gave us %d more tasks' % (i, len(outcome['tasks']))) + if outcome.get('continuationToken'): + outcome = queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g', query={'continuationToken': outcome.get('continuationToken')}) + i += 1 + tasks += len(outcome.get('tasks', [])) +print('Task Group %s has %d tasks' % (outcome['taskGroupId'], tasks)) +``` + +There's also an experimental feature to support built in automatic pagination +in the sync client. This feature allows passing a callback as the +'paginationHandler' keyword-argument. This function will be passed the +response body of the API method as its sole positional arugment. + +This example of the built in pagination shows how a list of tasks could be +built and then counted: + +```python +import taskcluster +queue = taskcluster.Queue({'rootUrl': 'https://tc.example.com'}) + +responses = [] + +def handle_page(y): + print("%d tasks fetched" % len(y.get('tasks', []))) + responses.append(y) + +queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g', paginationHandler=handle_page) + +tasks = 0 +for response in responses: + tasks += len(response.get('tasks', [])) + +print("%d requests fetch %d tasks" % (len(responses), tasks)) +``` + +## Logging +Logging is set up in `taskcluster/__init__.py`. If the special +`DEBUG_TASKCLUSTER_CLIENT` environment variable is set, the `__init__.py` +module will set the `logging` module's level for its logger to `logging.DEBUG` +and if there are no existing handlers, add a `logging.StreamHandler()` +instance. This is meant to assist those who do not wish to bother figuring out +how to configure the python logging module but do want debug messages + + +## Scopes +The `scopeMatch(assumedScopes, requiredScopeSets)` function determines +whether one or more of a set of required scopes are satisfied by the assumed +scopes, taking *-expansion into account. This is useful for making local +decisions on scope satisfaction, but note that `assumed_scopes` must be the +*expanded* scopes, as this function cannot perform expansion. + +It takes a list of a assumed scopes, and a list of required scope sets on +disjunctive normal form, and checks if any of the required scope sets are +satisfied. + +Example: + +``` + requiredScopeSets = [ + ["scopeA", "scopeB"], + ["scopeC:*"] + ] + assert scopesMatch(['scopeA', 'scopeB'], requiredScopeSets) + assert scopesMatch(['scopeC:xyz'], requiredScopeSets) + assert not scopesMatch(['scopeA'], requiredScopeSets) + assert not scopesMatch(['scopeC'], requiredScopeSets) +``` + +## Relative Date-time Utilities +A lot of taskcluster APIs requires ISO 8601 time stamps offset into the future +as way of providing expiration, deadlines, etc. These can be easily created +using `datetime.datetime.isoformat()`, however, it can be rather error prone +and tedious to offset `datetime.datetime` objects into the future. Therefore +this library comes with two utility functions for this purposes. 
+ +```python +dateObject = taskcluster.fromNow("2 days 3 hours 1 minute") +# datetime.datetime(2017, 1, 21, 17, 8, 1, 607929) +dateString = taskcluster.fromNowJSON("2 days 3 hours 1 minute") +# '2017-01-21T17:09:23.240178Z' +``` + +By default it will offset the date time into the future, if the offset strings +are prefixed minus (`-`) the date object will be offset into the past. This is +useful in some corner cases. + +```python +dateObject = taskcluster.fromNow("- 1 year 2 months 3 weeks 5 seconds"); +# datetime.datetime(2015, 10, 30, 18, 16, 50, 931161) +``` + +The offset string is ignorant of whitespace and case insensitive. It may also +optionally be prefixed plus `+` (if not prefixed minus), any `+` prefix will be +ignored. However, entries in the offset string must be given in order from +high to low, ie. `2 years 1 day`. Additionally, various shorthands may be +employed, as illustrated below. + +``` + years, year, yr, y + months, month, mo + weeks, week, w + days, day, d + hours, hour, h + minutes, minute, min + seconds, second, sec, s +``` + +The `fromNow` method may also be given a date to be relative to as a second +argument. This is useful if offset the task expiration relative to the the task +deadline or doing something similar. This argument can also be passed as the +kwarg `dateObj` + +```python +dateObject1 = taskcluster.fromNow("2 days 3 hours"); +dateObject2 = taskcluster.fromNow("1 year", dateObject1); +taskcluster.fromNow("1 year", dateObj=dateObject1); +# datetime.datetime(2018, 1, 21, 17, 59, 0, 328934) +``` + +## Methods contained in the client library + + + +### Methods in `taskcluster.Auth` +```python +import asyncio # Only for async +// Create Auth client instance +import taskcluster +import taskcluster.aio + +auth = taskcluster.Auth(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncAuth = taskcluster.aio.Auth(options, session=session) +``` +Authentication related API end-points for Taskcluster and related +services. These API end-points are of interest if you wish to: + * Authorize a request signed with Taskcluster credentials, + * Manage clients and roles, + * Inspect or audit clients and roles, + * Gain access to various services guarded by this API. + +Note that in this service "authentication" refers to validating the +correctness of the supplied credentials (that the caller posesses the +appropriate access token). This service does not provide any kind of user +authentication (identifying a particular person). + +### Clients +The authentication service manages _clients_, at a high-level each client +consists of a `clientId`, an `accessToken`, scopes, and some metadata. +The `clientId` and `accessToken` can be used for authentication when +calling Taskcluster APIs. + +The client's scopes control the client's access to Taskcluster resources. +The scopes are *expanded* by substituting roles, as defined below. + +### Roles +A _role_ consists of a `roleId`, a set of scopes and a description. +Each role constitutes a simple _expansion rule_ that says if you have +the scope: `assume:` you get the set of scopes the role has. +Think of the `assume:` as a scope that allows a client to assume +a role. + +As in scopes the `*` kleene star also have special meaning if it is +located at the end of a `roleId`. 
+`roleId`: `my-prefix*`, then any client which has a scope starting with
+`assume:my-prefix` will be allowed to assume the role.
+
+### Guarded Services
+The authentication service also has API end-points for delegating access
+to some guarded service such as AWS S3, or Azure Table Storage.
+Generally, we add API end-points to this server when we wish to use
+Taskcluster credentials to grant access to a third-party service used
+by many Taskcluster components.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+auth.ping() # -> None`
+# Async call
+await asyncAuth.ping() # -> None
+```
+
+#### List Clients
+Get a list of all clients. With `prefix`, only clients for which
+it is a prefix of the clientId are returned.
+
+By default this end-point will try to return up to 1000 clients in one
+request. But it **may return fewer, even none**.
+It may also return a `continuationToken` even though there are no more
+results. However, you can only be sure to have seen all results if you
+keep calling `listClients` with the last `continuationToken` until you
+get a result without a `continuationToken`.
+
+
+Required [output schema](v1/list-clients-response.json#)
+
+```python
+# Sync calls
+auth.listClients() # -> result`
+# Async call
+await asyncAuth.listClients() # -> result
+```
+
+#### Get Client
+Get information about a single client.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [output schema](v1/get-client-response.json#)
+
+```python
+# Sync calls
+auth.client(clientId) # -> result`
+auth.client(clientId='value') # -> result
+# Async call
+await asyncAuth.client(clientId) # -> result
+await asyncAuth.client(clientId='value') # -> result
+```
+
+#### Create Client
+Create a new client and get the `accessToken` for this client.
+You should store the `accessToken` from this API call as there is no
+other way to retrieve it.
+
+If you lose the `accessToken` you can call `resetAccessToken` to reset
+it, and a new `accessToken` will be returned, but you cannot retrieve the
+current `accessToken`.
+
+If a client with the same `clientId` already exists this operation will
+fail. Use `updateClient` if you wish to update an existing client.
+
+The caller's scopes must satisfy `scopes`.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [input schema](v1/create-client-request.json#)
+
+Required [output schema](v1/create-client-response.json#)
+
+```python
+# Sync calls
+auth.createClient(clientId, payload) # -> result`
+auth.createClient(payload, clientId='value') # -> result
+# Async call
+await asyncAuth.createClient(clientId, payload) # -> result
+await asyncAuth.createClient(payload, clientId='value') # -> result
+```
+
+#### Reset `accessToken`
+Reset a client's `accessToken`; this will revoke the existing
+`accessToken`, generate a new `accessToken` and return it from this
+call.
+
+There is no way to retrieve an existing `accessToken`, so if you lose it
+you must reset the `accessToken` to acquire a new one.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [output schema](v1/create-client-response.json#)
+
+```python
+# Sync calls
+auth.resetAccessToken(clientId) # -> result`
+auth.resetAccessToken(clientId='value') # -> result
+# Async call
+await asyncAuth.resetAccessToken(clientId) # -> result
+await asyncAuth.resetAccessToken(clientId='value') # -> result
+```
+
+#### Update Client
+Update an existing client.
The `clientId` and `accessToken` cannot be +updated, but `scopes` can be modified. The caller's scopes must +satisfy all scopes being added to the client in the update operation. +If no scopes are given in the request, the client's scopes remain +unchanged + + + +Takes the following arguments: + + * `clientId` + +Required [input schema](v1/create-client-request.json#) + +Required [output schema](v1/get-client-response.json#) + +```python +# Sync calls +auth.updateClient(clientId, payload) # -> result` +auth.updateClient(payload, clientId='value') # -> result +# Async call +await asyncAuth.updateClient(clientId, payload) # -> result +await asyncAuth.updateClient(payload, clientId='value') # -> result +``` + +#### Enable Client +Enable a client that was disabled with `disableClient`. If the client +is already enabled, this does nothing. + +This is typically used by identity providers to re-enable clients that +had been disabled when the corresponding identity's scopes changed. + + + +Takes the following arguments: + + * `clientId` + +Required [output schema](v1/get-client-response.json#) + +```python +# Sync calls +auth.enableClient(clientId) # -> result` +auth.enableClient(clientId='value') # -> result +# Async call +await asyncAuth.enableClient(clientId) # -> result +await asyncAuth.enableClient(clientId='value') # -> result +``` + +#### Disable Client +Disable a client. If the client is already disabled, this does nothing. + +This is typically used by identity providers to disable clients when the +corresponding identity's scopes no longer satisfy the client's scopes. + + + +Takes the following arguments: + + * `clientId` + +Required [output schema](v1/get-client-response.json#) + +```python +# Sync calls +auth.disableClient(clientId) # -> result` +auth.disableClient(clientId='value') # -> result +# Async call +await asyncAuth.disableClient(clientId) # -> result +await asyncAuth.disableClient(clientId='value') # -> result +``` + +#### Delete Client +Delete a client, please note that any roles related to this client must +be deleted independently. + + + +Takes the following arguments: + + * `clientId` + +```python +# Sync calls +auth.deleteClient(clientId) # -> None` +auth.deleteClient(clientId='value') # -> None +# Async call +await asyncAuth.deleteClient(clientId) # -> None +await asyncAuth.deleteClient(clientId='value') # -> None +``` + +#### List Roles +Get a list of all roles, each role object also includes the list of +scopes it expands to. + + +Required [output schema](v1/list-roles-response.json#) + +```python +# Sync calls +auth.listRoles() # -> result` +# Async call +await asyncAuth.listRoles() # -> result +``` + +#### Get Role +Get information about a single role, including the set of scopes that the +role expands to. + + + +Takes the following arguments: + + * `roleId` + +Required [output schema](v1/get-role-response.json#) + +```python +# Sync calls +auth.role(roleId) # -> result` +auth.role(roleId='value') # -> result +# Async call +await asyncAuth.role(roleId) # -> result +await asyncAuth.role(roleId='value') # -> result +``` + +#### Create Role +Create a new role. + +The caller's scopes must satisfy the new role's scopes. + +If there already exists a role with the same `roleId` this operation +will fail. Use `updateRole` to modify an existing role. + +Creation of a role that will generate an infinite expansion will result +in an error response. 
+ + + +Takes the following arguments: + + * `roleId` + +Required [input schema](v1/create-role-request.json#) + +Required [output schema](v1/get-role-response.json#) + +```python +# Sync calls +auth.createRole(roleId, payload) # -> result` +auth.createRole(payload, roleId='value') # -> result +# Async call +await asyncAuth.createRole(roleId, payload) # -> result +await asyncAuth.createRole(payload, roleId='value') # -> result +``` + +#### Update Role +Update an existing role. + +The caller's scopes must satisfy all of the new scopes being added, but +need not satisfy all of the client's existing scopes. + +An update of a role that will generate an infinite expansion will result +in an error response. + + + +Takes the following arguments: + + * `roleId` + +Required [input schema](v1/create-role-request.json#) + +Required [output schema](v1/get-role-response.json#) + +```python +# Sync calls +auth.updateRole(roleId, payload) # -> result` +auth.updateRole(payload, roleId='value') # -> result +# Async call +await asyncAuth.updateRole(roleId, payload) # -> result +await asyncAuth.updateRole(payload, roleId='value') # -> result +``` + +#### Delete Role +Delete a role. This operation will succeed regardless of whether or not +the role exists. + + + +Takes the following arguments: + + * `roleId` + +```python +# Sync calls +auth.deleteRole(roleId) # -> None` +auth.deleteRole(roleId='value') # -> None +# Async call +await asyncAuth.deleteRole(roleId) # -> None +await asyncAuth.deleteRole(roleId='value') # -> None +``` + +#### Expand Scopes +Return an expanded copy of the given scopeset, with scopes implied by any +roles included. + +This call uses the GET method with an HTTP body. It remains only for +backward compatibility. + + +Required [input schema](v1/scopeset.json#) + +Required [output schema](v1/scopeset.json#) + +```python +# Sync calls +auth.expandScopesGet(payload) # -> result` +# Async call +await asyncAuth.expandScopesGet(payload) # -> result +``` + +#### Expand Scopes +Return an expanded copy of the given scopeset, with scopes implied by any +roles included. + + +Required [input schema](v1/scopeset.json#) + +Required [output schema](v1/scopeset.json#) + +```python +# Sync calls +auth.expandScopes(payload) # -> result` +# Async call +await asyncAuth.expandScopes(payload) # -> result +``` + +#### Get Current Scopes +Return the expanded scopes available in the request, taking into account all sources +of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes, +and roles). + + +Required [output schema](v1/scopeset.json#) + +```python +# Sync calls +auth.currentScopes() # -> result` +# Async call +await asyncAuth.currentScopes() # -> result +``` + +#### Get Temporary Read/Write Credentials S3 +Get temporary AWS credentials for `read-write` or `read-only` access to +a given `bucket` and `prefix` within that bucket. +The `level` parameter can be `read-write` or `read-only` and determines +which type of credentials are returned. Please note that the `level` +parameter is required in the scope guarding access. The bucket name must +not contain `.`, as recommended by Amazon. + +This method can only allow access to a whitelisted set of buckets. To add +a bucket to that whitelist, contact the Taskcluster team, who will add it to +the appropriate IAM policy. If the bucket is in a different AWS account, you +will also need to add a bucket policy allowing access from the Taskcluster +account. 
That policy should look like this: + +```js +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "allow-taskcluster-auth-to-delegate-access", + "Effect": "Allow", + "Principal": { + "AWS": "arn:aws:iam::692406183521:root" + }, + "Action": [ + "s3:ListBucket", + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + "s3:GetBucketLocation" + ], + "Resource": [ + "arn:aws:s3:::", + "arn:aws:s3:::/*" + ] + } + ] +} +``` + +The credentials are set to expire after an hour, but this behavior is +subject to change. Hence, you should always read the `expires` property +from the response, if you intend to maintain active credentials in your +application. + +Please note that your `prefix` may not start with slash `/`. Such a prefix +is allowed on S3, but we forbid it here to discourage bad behavior. + +Also note that if your `prefix` doesn't end in a slash `/`, the STS +credentials may allow access to unexpected keys, as S3 does not treat +slashes specially. For example, a prefix of `my-folder` will allow +access to `my-folder/file.txt` as expected, but also to `my-folder.txt`, +which may not be intended. + +Finally, note that the `PutObjectAcl` call is not allowed. Passing a canned +ACL other than `private` to `PutObject` is treated as a `PutObjectAcl` call, and +will result in an access-denied error from AWS. This limitation is due to a +security flaw in Amazon S3 which might otherwise allow indefinite access to +uploaded objects. + +**EC2 metadata compatibility**, if the querystring parameter +`?format=iam-role-compat` is given, the response will be compatible +with the JSON exposed by the EC2 metadata service. This aims to ease +compatibility for libraries and tools built to auto-refresh credentials. +For details on the format returned by EC2 metadata service see: +[EC2 User Guide](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#instance-metadata-security-credentials). + + + +Takes the following arguments: + + * `level` + * `bucket` + * `prefix` + +Required [output schema](v1/aws-s3-credentials-response.json#) + +```python +# Sync calls +auth.awsS3Credentials(level, bucket, prefix) # -> result` +auth.awsS3Credentials(level='value', bucket='value', prefix='value') # -> result +# Async call +await asyncAuth.awsS3Credentials(level, bucket, prefix) # -> result +await asyncAuth.awsS3Credentials(level='value', bucket='value', prefix='value') # -> result +``` + +#### List Accounts Managed by Auth +Retrieve a list of all Azure accounts managed by Taskcluster Auth. + + +Required [output schema](v1/azure-account-list-response.json#) + +```python +# Sync calls +auth.azureAccounts() # -> result` +# Async call +await asyncAuth.azureAccounts() # -> result +``` + +#### List Tables in an Account Managed by Auth +Retrieve a list of all tables in an account. + + + +Takes the following arguments: + + * `account` + +Required [output schema](v1/azure-table-list-response.json#) + +```python +# Sync calls +auth.azureTables(account) # -> result` +auth.azureTables(account='value') # -> result +# Async call +await asyncAuth.azureTables(account) # -> result +await asyncAuth.azureTables(account='value') # -> result +``` + +#### Get Shared-Access-Signature for Azure Table +Get a shared access signature (SAS) string for use with a specific Azure +Table Storage table. + +The `level` parameter can be `read-write` or `read-only` and determines +which type of credentials are returned. If level is read-write, it will create the +table if it doesn't already exist. 
+ + + +Takes the following arguments: + + * `account` + * `table` + * `level` + +Required [output schema](v1/azure-table-access-response.json#) + +```python +# Sync calls +auth.azureTableSAS(account, table, level) # -> result` +auth.azureTableSAS(account='value', table='value', level='value') # -> result +# Async call +await asyncAuth.azureTableSAS(account, table, level) # -> result +await asyncAuth.azureTableSAS(account='value', table='value', level='value') # -> result +``` + +#### List containers in an Account Managed by Auth +Retrieve a list of all containers in an account. + + + +Takes the following arguments: + + * `account` + +Required [output schema](v1/azure-container-list-response.json#) + +```python +# Sync calls +auth.azureContainers(account) # -> result` +auth.azureContainers(account='value') # -> result +# Async call +await asyncAuth.azureContainers(account) # -> result +await asyncAuth.azureContainers(account='value') # -> result +``` + +#### Get Shared-Access-Signature for Azure Container +Get a shared access signature (SAS) string for use with a specific Azure +Blob Storage container. + +The `level` parameter can be `read-write` or `read-only` and determines +which type of credentials are returned. If level is read-write, it will create the +container if it doesn't already exist. + + + +Takes the following arguments: + + * `account` + * `container` + * `level` + +Required [output schema](v1/azure-container-response.json#) + +```python +# Sync calls +auth.azureContainerSAS(account, container, level) # -> result` +auth.azureContainerSAS(account='value', container='value', level='value') # -> result +# Async call +await asyncAuth.azureContainerSAS(account, container, level) # -> result +await asyncAuth.azureContainerSAS(account='value', container='value', level='value') # -> result +``` + +#### Get DSN for Sentry Project +Get temporary DSN (access credentials) for a sentry project. +The credentials returned can be used with any Sentry client for up to +24 hours, after which the credentials will be automatically disabled. + +If the project doesn't exist it will be created, and assigned to the +initial team configured for this component. Contact a Sentry admin +to have the project transferred to a team you have access to if needed + + + +Takes the following arguments: + + * `project` + +Required [output schema](v1/sentry-dsn-response.json#) + +```python +# Sync calls +auth.sentryDSN(project) # -> result` +auth.sentryDSN(project='value') # -> result +# Async call +await asyncAuth.sentryDSN(project) # -> result +await asyncAuth.sentryDSN(project='value') # -> result +``` + +#### Get Token for Statsum Project +Get temporary `token` and `baseUrl` for sending metrics to statsum. + +The token is valid for 24 hours, clients should refresh after expiration. + + + +Takes the following arguments: + + * `project` + +Required [output schema](v1/statsum-token-response.json#) + +```python +# Sync calls +auth.statsumToken(project) # -> result` +auth.statsumToken(project='value') # -> result +# Async call +await asyncAuth.statsumToken(project) # -> result +await asyncAuth.statsumToken(project='value') # -> result +``` + +#### Get Token for Webhooktunnel Proxy +Get temporary `token` and `id` for connecting to webhooktunnel +The token is valid for 96 hours, clients should refresh after expiration. 
+ + +Required [output schema](v1/webhooktunnel-token-response.json#) + +```python +# Sync calls +auth.webhooktunnelToken() # -> result` +# Async call +await asyncAuth.webhooktunnelToken() # -> result +``` + +#### Authenticate Hawk Request +Validate the request signature given on input and return list of scopes +that the authenticating client has. + +This method is used by other services that wish rely on Taskcluster +credentials for authentication. This way we can use Hawk without having +the secret credentials leave this service. + + +Required [input schema](v1/authenticate-hawk-request.json#) + +Required [output schema](v1/authenticate-hawk-response.json#) + +```python +# Sync calls +auth.authenticateHawk(payload) # -> result` +# Async call +await asyncAuth.authenticateHawk(payload) # -> result +``` + +#### Test Authentication +Utility method to test client implementations of Taskcluster +authentication. + +Rather than using real credentials, this endpoint accepts requests with +clientId `tester` and accessToken `no-secret`. That client's scopes are +based on `clientScopes` in the request body. + +The request is validated, with any certificate, authorizedScopes, etc. +applied, and the resulting scopes are checked against `requiredScopes` +from the request body. On success, the response contains the clientId +and scopes as seen by the API method. + + +Required [input schema](v1/test-authenticate-request.json#) + +Required [output schema](v1/test-authenticate-response.json#) + +```python +# Sync calls +auth.testAuthenticate(payload) # -> result` +# Async call +await asyncAuth.testAuthenticate(payload) # -> result +``` + +#### Test Authentication (GET) +Utility method similar to `testAuthenticate`, but with the GET method, +so it can be used with signed URLs (bewits). + +Rather than using real credentials, this endpoint accepts requests with +clientId `tester` and accessToken `no-secret`. That client's scopes are +`['test:*', 'auth:create-client:test:*']`. The call fails if the +`test:authenticate-get` scope is not available. + +The request is validated, with any certificate, authorizedScopes, etc. +applied, and the resulting scopes are checked, just like any API call. +On success, the response contains the clientId and scopes as seen by +the API method. + +This method may later be extended to allow specification of client and +required scopes via query arguments. + + +Required [output schema](v1/test-authenticate-response.json#) + +```python +# Sync calls +auth.testAuthenticateGet() # -> result` +# Async call +await asyncAuth.testAuthenticateGet() # -> result +``` + + + + +### Exchanges in `taskcluster.AuthEvents` +```python +// Create AuthEvents client instance +import taskcluster +authEvents = taskcluster.AuthEvents(options) +``` +The auth service is responsible for storing credentials, managing +assignment of scopes, and validation of request signatures from other +services. + +These exchanges provides notifications when credentials or roles are +updated. This is mostly so that multiple instances of the auth service +can purge their caches and synchronize state. But you are of course +welcome to use these for other purposes, monitoring changes for example. +#### Client Created Messages + * `authEvents.clientCreated(routingKeyPattern) -> routingKey` + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. 
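+
+As a minimal sketch of how this exchange declaration can be turned into pulse
+binding information (assuming, as for the sync client's other topic exchanges,
+that the method returns a dict with `exchange` and `routingKeyPattern` keys;
+check your client version before relying on this shape):
+
+```python
+import taskcluster
+
+authEvents = taskcluster.AuthEvents({'rootUrl': 'https://tc.example.com'})
+
+# Match every routing key; the entry above recommends matching `reserved`
+# with '#'. The returned dict can be handed to a pulse consumer as a binding.
+binding = authEvents.clientCreated({'reserved': '#'})
+print(binding['exchange'])
+print(binding['routingKeyPattern'])
+```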
+
+#### Client Updated Messages
+ * `authEvents.clientUpdated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Client Deleted Messages
+ * `authEvents.clientDeleted(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Role Created Messages
+ * `authEvents.roleCreated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Role Updated Messages
+ * `authEvents.roleUpdated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Role Deleted Messages
+ * `authEvents.roleDeleted(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+
+
+
+### Methods in `taskcluster.AwsProvisioner`
+```python
+import asyncio # Only for async
+# Create AwsProvisioner client instance
+import taskcluster
+import taskcluster.aio
+
+awsProvisioner = taskcluster.AwsProvisioner(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncAwsProvisioner = taskcluster.aio.AwsProvisioner(options, session=session)
+```
+The AWS Provisioner is responsible for provisioning instances on EC2 for use in
+Taskcluster. The provisioner maintains a set of worker configurations which
+can be managed with an API that is typically available at
+aws-provisioner.taskcluster.net/v1. This API can also perform basic instance
+management tasks in addition to maintaining the internal state of worker type
+configuration information.
+
+The Provisioner runs at a configurable interval. Each iteration of the
+provisioner fetches a current copy of the state that the AWS EC2 API reports.
+In each iteration, we ask the Queue how many tasks are pending for that worker
+type. Based on the number of tasks pending and the scaling ratio, we may
+submit requests for new instances. We use pricing information, capacity and
+utility factor information to decide which instance type in which region would
+be the optimal configuration.
+
+Each EC2 instance type will declare a capacity and utility factor. Capacity is
+the number of tasks that a given machine is capable of running concurrently.
+Utility factor is a relative measure of performance between two instance types.
+We multiply the utility factor by the spot price to compare instance types and
+regions when making the bidding choices.
+
+When a new EC2 instance is instantiated, its user data contains a token in
+`securityToken` that can be used with the `getSecret` method to retrieve
+the worker's credentials and any needed passwords or other restricted
+information. The worker is responsible for deleting the secret after
+retrieving it, to prevent dissemination of the secret to other processes
+which can read the instance user data.
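+
+As a rough illustration of the scaling description above (this is not the
+provisioner's actual algorithm, just a sketch of how pending tasks, the
+scaling ratio and per-instance capacity combine):
+
+```python
+import math
+
+# Illustrative only: combine the quantities described above into a number of
+# instances to request for one worker type.
+def instances_to_request(pending_tasks, scaling_ratio, running_capacity,
+                         capacity_per_instance):
+    desired_capacity = math.ceil(pending_tasks * scaling_ratio)
+    shortfall = max(desired_capacity - running_capacity, 0)
+    return math.ceil(shortfall / capacity_per_instance)
+
+print(instances_to_request(pending_tasks=40, scaling_ratio=0.5,
+                           running_capacity=8, capacity_per_instance=4))  # 3
+```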
+
+#### List worker types with details
+Return a list of worker types, including some summary information about
+current capacity for each. While this list includes all defined worker types,
+there may be running EC2 instances for deleted worker types that are not
+included here. The list is unordered.
+
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-summaries-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.listWorkerTypeSummaries() # -> result`
+# Async call
+await asyncAwsProvisioner.listWorkerTypeSummaries() # -> result
+```
+
+#### Create new Worker Type
+Create a worker type. A worker type contains all the configuration
+needed for the provisioner to manage the instances. Each worker type
+knows which regions and which instance types are allowed for that
+worker type. Remember that Capacity is the number of concurrent tasks
+that can be run on a given EC2 resource and that Utility is the relative
+performance rate between different instance types. There is no way to
+configure different regions to have different sets of instance types
+so ensure that all instance types are available in all regions.
+This function is idempotent.
+
+Once a worker type is in the provisioner, a background process will
+begin creating instances for it based on its capacity bounds and its
+pending task count from the Queue. It is the worker's responsibility
+to shut itself down. The provisioner has a limit (currently 96 hours)
+for all instances to prevent zombie instances from running indefinitely.
+
+The provisioner will ensure that all instances created are tagged with
+AWS resource tags containing the provisioner id and the worker type.
+
+If provided, the secrets in the global, region and instance type sections
+are available using the secrets API. If specified, the scopes provided
+will be used to generate a set of temporary credentials available with
+the other secrets.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [input schema](http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#)
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.createWorkerType(workerType, payload) # -> result`
+awsProvisioner.createWorkerType(payload, workerType='value') # -> result
+# Async call
+await asyncAwsProvisioner.createWorkerType(workerType, payload) # -> result
+await asyncAwsProvisioner.createWorkerType(payload, workerType='value') # -> result
+```
+
+#### Update Worker Type
+Provide a new copy of a worker type to replace the existing one.
+This will overwrite the existing worker type definition if there
+is already a worker type of that name. This method will return a
+200 response along with a copy of the worker type definition created.
+Note that if you are using the result of a GET on the worker-type
+end point you will need to delete the `lastModified` and `workerType`
+keys from the object returned, since those fields are not allowed in
+the request body for this method.
+
+Otherwise, all input requirements and actions are the same as the
+create method.
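+
+A minimal sketch of the GET-modify-update round trip described above, using
+the `awsProvisioner` client created earlier (the worker type name and the
+`maxCapacity` tweak are hypothetical):
+
+```python
+definition = awsProvisioner.workerType('my-worker-type')  # hypothetical name
+
+# The GET result carries fields that the update request body does not accept.
+definition.pop('workerType', None)
+definition.pop('lastModified', None)
+
+definition['maxCapacity'] = 20  # hypothetical change
+awsProvisioner.updateWorkerType('my-worker-type', definition)
+```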
+ + + +Takes the following arguments: + + * `workerType` + +Required [input schema](http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#) + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#) + +```python +# Sync calls +awsProvisioner.updateWorkerType(workerType, payload) # -> result` +awsProvisioner.updateWorkerType(payload, workerType='value') # -> result +# Async call +await asyncAwsProvisioner.updateWorkerType(workerType, payload) # -> result +await asyncAwsProvisioner.updateWorkerType(payload, workerType='value') # -> result +``` + +#### Get Worker Type Last Modified Time +This method is provided to allow workers to see when they were +last modified. The value provided through UserData can be +compared against this value to see if changes have been made +If the worker type definition has not been changed, the date +should be identical as it is the same stored value. + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-last-modified.json#) + +```python +# Sync calls +awsProvisioner.workerTypeLastModified(workerType) # -> result` +awsProvisioner.workerTypeLastModified(workerType='value') # -> result +# Async call +await asyncAwsProvisioner.workerTypeLastModified(workerType) # -> result +await asyncAwsProvisioner.workerTypeLastModified(workerType='value') # -> result +``` + +#### Get Worker Type +Retrieve a copy of the requested worker type definition. +This copy contains a lastModified field as well as the worker +type name. As such, it will require manipulation to be able to +use the results of this method to submit date to the update +method. + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#) + +```python +# Sync calls +awsProvisioner.workerType(workerType) # -> result` +awsProvisioner.workerType(workerType='value') # -> result +# Async call +await asyncAwsProvisioner.workerType(workerType) # -> result +await asyncAwsProvisioner.workerType(workerType='value') # -> result +``` + +#### Delete Worker Type +Delete a worker type definition. This method will only delete +the worker type definition from the storage table. The actual +deletion will be handled by a background worker. As soon as this +method is called for a worker type, the background worker will +immediately submit requests to cancel all spot requests for this +worker type as well as killing all instances regardless of their +state. If you want to gracefully remove a worker type, you must +either ensure that no tasks are created with that worker type name +or you could theoretically set maxCapacity to 0, though, this is +not a supported or tested action + + + +Takes the following arguments: + + * `workerType` + +```python +# Sync calls +awsProvisioner.removeWorkerType(workerType) # -> None` +awsProvisioner.removeWorkerType(workerType='value') # -> None +# Async call +await asyncAwsProvisioner.removeWorkerType(workerType) # -> None +await asyncAwsProvisioner.removeWorkerType(workerType='value') # -> None +``` + +#### List Worker Types +Return a list of string worker type names. These are the names +of all managed worker types known to the provisioner. This does +not include worker types which are left overs from a deleted worker +type definition but are still running in AWS. 
+ + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-response.json#) + +```python +# Sync calls +awsProvisioner.listWorkerTypes() # -> result` +# Async call +await asyncAwsProvisioner.listWorkerTypes() # -> result +``` + +#### Create new Secret +Insert a secret into the secret storage. The supplied secrets will +be provided verbatime via `getSecret`, while the supplied scopes will +be converted into credentials by `getSecret`. + +This method is not ordinarily used in production; instead, the provisioner +creates a new secret directly for each spot bid. + + + +Takes the following arguments: + + * `token` + +Required [input schema](http://schemas.taskcluster.net/aws-provisioner/v1/create-secret-request.json#) + +```python +# Sync calls +awsProvisioner.createSecret(token, payload) # -> None` +awsProvisioner.createSecret(payload, token='value') # -> None +# Async call +await asyncAwsProvisioner.createSecret(token, payload) # -> None +await asyncAwsProvisioner.createSecret(payload, token='value') # -> None +``` + +#### Get a Secret +Retrieve a secret from storage. The result contains any passwords or +other restricted information verbatim as well as a temporary credential +based on the scopes specified when the secret was created. + +It is important that this secret is deleted by the consumer (`removeSecret`), +or else the secrets will be visible to any process which can access the +user data associated with the instance. + + + +Takes the following arguments: + + * `token` + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-secret-response.json#) + +```python +# Sync calls +awsProvisioner.getSecret(token) # -> result` +awsProvisioner.getSecret(token='value') # -> result +# Async call +await asyncAwsProvisioner.getSecret(token) # -> result +await asyncAwsProvisioner.getSecret(token='value') # -> result +``` + +#### Report an instance starting +An instance will report in by giving its instance id as well +as its security token. The token is given and checked to ensure +that it matches a real token that exists to ensure that random +machines do not check in. We could generate a different token +but that seems like overkill + + + +Takes the following arguments: + + * `instanceId` + * `token` + +```python +# Sync calls +awsProvisioner.instanceStarted(instanceId, token) # -> None` +awsProvisioner.instanceStarted(instanceId='value', token='value') # -> None +# Async call +await asyncAwsProvisioner.instanceStarted(instanceId, token) # -> None +await asyncAwsProvisioner.instanceStarted(instanceId='value', token='value') # -> None +``` + +#### Remove a Secret +Remove a secret. After this call, a call to `getSecret` with the given +token will return no information. + +It is very important that the consumer of a +secret delete the secret from storage before handing over control +to untrusted processes to prevent credential and/or secret leakage. + + + +Takes the following arguments: + + * `token` + +```python +# Sync calls +awsProvisioner.removeSecret(token) # -> None` +awsProvisioner.removeSecret(token='value') # -> None +# Async call +await asyncAwsProvisioner.removeSecret(token) # -> None +await asyncAwsProvisioner.removeSecret(token='value') # -> None +``` + +#### Get All Launch Specifications for WorkerType +This method returns a preview of all possible launch specifications +that this worker type definition could submit to EC2. 
It is used to +test worker types, nothing more + +**This API end-point is experimental and may be subject to change without warning.** + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#) + +```python +# Sync calls +awsProvisioner.getLaunchSpecs(workerType) # -> result` +awsProvisioner.getLaunchSpecs(workerType='value') # -> result +# Async call +await asyncAwsProvisioner.getLaunchSpecs(workerType) # -> result +await asyncAwsProvisioner.getLaunchSpecs(workerType='value') # -> result +``` + +#### Get AWS State for a worker type +Return the state of a given workertype as stored by the provisioner. +This state is stored as three lists: 1 for running instances, 1 for +pending requests. The `summary` property contains an updated summary +similar to that returned from `listWorkerTypeSummaries`. + + + +Takes the following arguments: + + * `workerType` + +```python +# Sync calls +awsProvisioner.state(workerType) # -> None` +awsProvisioner.state(workerType='value') # -> None +# Async call +await asyncAwsProvisioner.state(workerType) # -> None +await asyncAwsProvisioner.state(workerType='value') # -> None +``` + +#### Backend Status +This endpoint is used to show when the last time the provisioner +has checked in. A check in is done through the deadman's snitch +api. It is done at the conclusion of a provisioning iteration +and used to tell if the background provisioning process is still +running. + +**Warning** this api end-point is **not stable**. + + +Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/backend-status-response.json#) + +```python +# Sync calls +awsProvisioner.backendStatus() # -> result` +# Async call +await asyncAwsProvisioner.backendStatus() # -> result +``` + +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +awsProvisioner.ping() # -> None` +# Async call +await asyncAwsProvisioner.ping() # -> None +``` + + + + +### Exchanges in `taskcluster.AwsProvisionerEvents` +```python +// Create AwsProvisionerEvents client instance +import taskcluster +awsProvisionerEvents = taskcluster.AwsProvisionerEvents(options) +``` +Exchanges from the provisioner... more docs later +#### WorkerType Created Message + * `awsProvisionerEvents.workerTypeCreated(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `workerType` is required Description: WorkerType that this message concerns. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + +#### WorkerType Updated Message + * `awsProvisionerEvents.workerTypeUpdated(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `workerType` is required Description: WorkerType that this message concerns. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. 
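+
+As with the `AuthEvents` sketch earlier, a binding that only matches messages
+for a single worker type can be built by passing the routing-key components as
+a dict (the worker type name here is hypothetical, and the returned dict shape
+is the same assumption as above):
+
+```python
+import taskcluster
+
+awsProvisionerEvents = taskcluster.AwsProvisionerEvents({'rootUrl': 'https://tc.example.com'})
+
+# Unspecified routing-key components are filled with wildcards.
+binding = awsProvisionerEvents.workerTypeUpdated({'workerType': 'gecko-t-linux'})
+print(binding['exchange'], binding['routingKeyPattern'])
+```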
+ +#### WorkerType Removed Message + * `awsProvisionerEvents.workerTypeRemoved(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `workerType` is required Description: WorkerType that this message concerns. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + + + + +### Methods in `taskcluster.EC2Manager` +```python +import asyncio # Only for async +// Create EC2Manager client instance +import taskcluster +import taskcluster.aio + +eC2Manager = taskcluster.EC2Manager(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncEC2Manager = taskcluster.aio.EC2Manager(options, session=session) +``` +A taskcluster service which manages EC2 instances. This service does not understand any taskcluster concepts intrinsicaly other than using the name `workerType` to refer to a group of associated instances. Unless you are working on building a provisioner for AWS, you almost certainly do not want to use this service +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +eC2Manager.ping() # -> None` +# Async call +await asyncEC2Manager.ping() # -> None +``` + +#### See the list of worker types which are known to be managed +This method is only for debugging the ec2-manager + + +Required [output schema](v1/list-worker-types.json#) + +```python +# Sync calls +eC2Manager.listWorkerTypes() # -> result` +# Async call +await asyncEC2Manager.listWorkerTypes() # -> result +``` + +#### Run an instance +Request an instance of a worker type + + + +Takes the following arguments: + + * `workerType` + +Required [input schema](v1/run-instance-request.json#) + +```python +# Sync calls +eC2Manager.runInstance(workerType, payload) # -> None` +eC2Manager.runInstance(payload, workerType='value') # -> None +# Async call +await asyncEC2Manager.runInstance(workerType, payload) # -> None +await asyncEC2Manager.runInstance(payload, workerType='value') # -> None +``` + +#### Terminate all resources from a worker type +Terminate all instances for this worker type + + + +Takes the following arguments: + + * `workerType` + +```python +# Sync calls +eC2Manager.terminateWorkerType(workerType) # -> None` +eC2Manager.terminateWorkerType(workerType='value') # -> None +# Async call +await asyncEC2Manager.terminateWorkerType(workerType) # -> None +await asyncEC2Manager.terminateWorkerType(workerType='value') # -> None +``` + +#### Look up the resource stats for a workerType +Return an object which has a generic state description. 
This only contains counts of instances + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](v1/worker-type-resources.json#) + +```python +# Sync calls +eC2Manager.workerTypeStats(workerType) # -> result` +eC2Manager.workerTypeStats(workerType='value') # -> result +# Async call +await asyncEC2Manager.workerTypeStats(workerType) # -> result +await asyncEC2Manager.workerTypeStats(workerType='value') # -> result +``` + +#### Look up the resource health for a workerType +Return a view of the health of a given worker type + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](v1/health.json#) + +```python +# Sync calls +eC2Manager.workerTypeHealth(workerType) # -> result` +eC2Manager.workerTypeHealth(workerType='value') # -> result +# Async call +await asyncEC2Manager.workerTypeHealth(workerType) # -> result +await asyncEC2Manager.workerTypeHealth(workerType='value') # -> result +``` + +#### Look up the most recent errors of a workerType +Return a list of the most recent errors encountered by a worker type + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](v1/errors.json#) + +```python +# Sync calls +eC2Manager.workerTypeErrors(workerType) # -> result` +eC2Manager.workerTypeErrors(workerType='value') # -> result +# Async call +await asyncEC2Manager.workerTypeErrors(workerType) # -> result +await asyncEC2Manager.workerTypeErrors(workerType='value') # -> result +``` + +#### Look up the resource state for a workerType +Return state information for a given worker type + + + +Takes the following arguments: + + * `workerType` + +Required [output schema](v1/worker-type-state.json#) + +```python +# Sync calls +eC2Manager.workerTypeState(workerType) # -> result` +eC2Manager.workerTypeState(workerType='value') # -> result +# Async call +await asyncEC2Manager.workerTypeState(workerType) # -> result +await asyncEC2Manager.workerTypeState(workerType='value') # -> result +``` + +#### Ensure a KeyPair for a given worker type exists +Idempotently ensure that a keypair of a given name exists + + + +Takes the following arguments: + + * `name` + +Required [input schema](v1/create-key-pair.json#) + +```python +# Sync calls +eC2Manager.ensureKeyPair(name, payload) # -> None` +eC2Manager.ensureKeyPair(payload, name='value') # -> None +# Async call +await asyncEC2Manager.ensureKeyPair(name, payload) # -> None +await asyncEC2Manager.ensureKeyPair(payload, name='value') # -> None +``` + +#### Ensure a KeyPair for a given worker type does not exist +Ensure that a keypair of a given name does not exist. 
+ + + +Takes the following arguments: + + * `name` + +```python +# Sync calls +eC2Manager.removeKeyPair(name) # -> None` +eC2Manager.removeKeyPair(name='value') # -> None +# Async call +await asyncEC2Manager.removeKeyPair(name) # -> None +await asyncEC2Manager.removeKeyPair(name='value') # -> None +``` + +#### Terminate an instance +Terminate an instance in a specified region + + + +Takes the following arguments: + + * `region` + * `instanceId` + +```python +# Sync calls +eC2Manager.terminateInstance(region, instanceId) # -> None` +eC2Manager.terminateInstance(region='value', instanceId='value') # -> None +# Async call +await asyncEC2Manager.terminateInstance(region, instanceId) # -> None +await asyncEC2Manager.terminateInstance(region='value', instanceId='value') # -> None +``` + +#### Request prices for EC2 +Return a list of possible prices for EC2 + + +Required [output schema](v1/prices.json#) + +```python +# Sync calls +eC2Manager.getPrices() # -> result` +# Async call +await asyncEC2Manager.getPrices() # -> result +``` + +#### Request prices for EC2 +Return a list of possible prices for EC2 + + +Required [input schema](v1/prices-request.json#) + +Required [output schema](v1/prices.json#) + +```python +# Sync calls +eC2Manager.getSpecificPrices(payload) # -> result` +# Async call +await asyncEC2Manager.getSpecificPrices(payload) # -> result +``` + +#### Get EC2 account health metrics +Give some basic stats on the health of our EC2 account + + +Required [output schema](v1/health.json#) + +```python +# Sync calls +eC2Manager.getHealth() # -> result` +# Async call +await asyncEC2Manager.getHealth() # -> result +``` + +#### Look up the most recent errors in the provisioner across all worker types +Return a list of recent errors encountered + + +Required [output schema](v1/errors.json#) + +```python +# Sync calls +eC2Manager.getRecentErrors() # -> result` +# Async call +await asyncEC2Manager.getRecentErrors() # -> result +``` + +#### See the list of regions managed by this ec2-manager +This method is only for debugging the ec2-manager + + +```python +# Sync calls +eC2Manager.regions() # -> None` +# Async call +await asyncEC2Manager.regions() # -> None +``` + +#### See the list of AMIs and their usage +List AMIs and their usage by returning a list of objects in the form: +{ +region: string + volumetype: string + lastused: timestamp +} + + +```python +# Sync calls +eC2Manager.amiUsage() # -> None` +# Async call +await asyncEC2Manager.amiUsage() # -> None +``` + +#### See the current EBS volume usage list +Lists current EBS volume usage by returning a list of objects +that are uniquely defined by {region, volumetype, state} in the form: +{ +region: string, + volumetype: string, + state: string, + totalcount: integer, + totalgb: integer, + touched: timestamp (last time that information was updated), +} + + +```python +# Sync calls +eC2Manager.ebsUsage() # -> None` +# Async call +await asyncEC2Manager.ebsUsage() # -> None +``` + +#### Statistics on the Database client pool +This method is only for debugging the ec2-manager + + +```python +# Sync calls +eC2Manager.dbpoolStats() # -> None` +# Async call +await asyncEC2Manager.dbpoolStats() # -> None +``` + +#### List out the entire internal state +This method is only for debugging the ec2-manager + + +```python +# Sync calls +eC2Manager.allState() # -> None` +# Async call +await asyncEC2Manager.allState() # -> None +``` + +#### Statistics on the sqs queues +This method is only for debugging the ec2-manager + + +```python +# Sync calls 
+eC2Manager.sqsStats() # -> None` +# Async call +await asyncEC2Manager.sqsStats() # -> None +``` + +#### Purge the SQS queues +This method is only for debugging the ec2-manager + + +```python +# Sync calls +eC2Manager.purgeQueues() # -> None` +# Async call +await asyncEC2Manager.purgeQueues() # -> None +``` + + + + +### Methods in `taskcluster.Github` +```python +import asyncio # Only for async +// Create Github client instance +import taskcluster +import taskcluster.aio + +github = taskcluster.Github(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncGithub = taskcluster.aio.Github(options, session=session) +``` +The github service is responsible for creating tasks in reposnse +to GitHub events, and posting results to the GitHub UI. + +This document describes the API end-point for consuming GitHub +web hooks, as well as some useful consumer APIs. + +When Github forbids an action, this service returns an HTTP 403 +with code ForbiddenByGithub. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +github.ping() # -> None` +# Async call +await asyncGithub.ping() # -> None +``` + +#### Consume GitHub WebHook +Capture a GitHub event and publish it via pulse, if it's a push, +release or pull request. + + +```python +# Sync calls +github.githubWebHookConsumer() # -> None` +# Async call +await asyncGithub.githubWebHookConsumer() # -> None +``` + +#### List of Builds +A paginated list of builds that have been run in +Taskcluster. Can be filtered on various git-specific +fields. + + +Required [output schema](v1/build-list.json#) + +```python +# Sync calls +github.builds() # -> result` +# Async call +await asyncGithub.builds() # -> result +``` + +#### Latest Build Status Badge +Checks the status of the latest build of a given branch +and returns corresponding badge svg. + + + +Takes the following arguments: + + * `owner` + * `repo` + * `branch` + +```python +# Sync calls +github.badge(owner, repo, branch) # -> None` +github.badge(owner='value', repo='value', branch='value') # -> None +# Async call +await asyncGithub.badge(owner, repo, branch) # -> None +await asyncGithub.badge(owner='value', repo='value', branch='value') # -> None +``` + +#### Get Repository Info +Returns any repository metadata that is +useful within Taskcluster related services. + + + +Takes the following arguments: + + * `owner` + * `repo` + +Required [output schema](v1/repository.json#) + +```python +# Sync calls +github.repository(owner, repo) # -> result` +github.repository(owner='value', repo='value') # -> result +# Async call +await asyncGithub.repository(owner, repo) # -> result +await asyncGithub.repository(owner='value', repo='value') # -> result +``` + +#### Latest Status for Branch +For a given branch of a repository, this will always point +to a status page for the most recent task triggered by that +branch. + +Note: This is a redirect rather than a direct link. 
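+
+Because this endpoint answers with a redirect, one option is to build the URL
+and hand it to a browser or other consumer instead of calling the method. A
+sketch, assuming the sync client's `buildUrl` helper is available and using
+hypothetical repository coordinates:
+
+```python
+url = github.buildUrl('latest', 'mozilla', 'gecko-dev', 'master')
+print(url)  # the service redirects this URL to the relevant status page
+```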
+ + + +Takes the following arguments: + + * `owner` + * `repo` + * `branch` + +```python +# Sync calls +github.latest(owner, repo, branch) # -> None` +github.latest(owner='value', repo='value', branch='value') # -> None +# Async call +await asyncGithub.latest(owner, repo, branch) # -> None +await asyncGithub.latest(owner='value', repo='value', branch='value') # -> None +``` + +#### Post a status against a given changeset +For a given changeset (SHA) of a repository, this will attach a "commit status" +on github. These statuses are links displayed next to each revision. +The status is either OK (green check) or FAILURE (red cross), +made of a custom title and link. + + + +Takes the following arguments: + + * `owner` + * `repo` + * `sha` + +Required [input schema](v1/create-status.json#) + +```python +# Sync calls +github.createStatus(owner, repo, sha, payload) # -> None` +github.createStatus(payload, owner='value', repo='value', sha='value') # -> None +# Async call +await asyncGithub.createStatus(owner, repo, sha, payload) # -> None +await asyncGithub.createStatus(payload, owner='value', repo='value', sha='value') # -> None +``` + +#### Post a comment on a given GitHub Issue or Pull Request +For a given Issue or Pull Request of a repository, this will write a new message. + + + +Takes the following arguments: + + * `owner` + * `repo` + * `number` + +Required [input schema](v1/create-comment.json#) + +```python +# Sync calls +github.createComment(owner, repo, number, payload) # -> None` +github.createComment(payload, owner='value', repo='value', number='value') # -> None +# Async call +await asyncGithub.createComment(owner, repo, number, payload) # -> None +await asyncGithub.createComment(payload, owner='value', repo='value', number='value') # -> None +``` + + + + +### Exchanges in `taskcluster.GithubEvents` +```python +// Create GithubEvents client instance +import taskcluster +githubEvents = taskcluster.GithubEvents(options) +``` +The github service publishes a pulse +message for supported github events, translating Github webhook +events into pulse messages. + +This document describes the exchange offered by the taskcluster +github service +#### GitHub Pull Request Event + * `githubEvents.pullRequest(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. + * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + * `repository` is required Description: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + * `action` is required Description: The GitHub `action` which triggered an event. See for possible values see the payload actions property. + +#### GitHub push Event + * `githubEvents.push(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. + * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. 
+ * `repository` is required Description: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + +#### GitHub release Event + * `githubEvents.release(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. + * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + * `repository` is required Description: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + +#### GitHub release Event + * `githubEvents.taskGroupDefined(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. + * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + * `repository` is required Description: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. + + + + +### Methods in `taskcluster.Hooks` +```python +import asyncio # Only for async +// Create Hooks client instance +import taskcluster +import taskcluster.aio + +hooks = taskcluster.Hooks(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncHooks = taskcluster.aio.Hooks(options, session=session) +``` +Hooks are a mechanism for creating tasks in response to events. + +Hooks are identified with a `hookGroupId` and a `hookId`. + +When an event occurs, the resulting task is automatically created. The +task is created using the scope `assume:hook-id:/`, +which must have scopes to make the createTask call, including satisfying all +scopes in `task.scopes`. The new task has a `taskGroupId` equal to its +`taskId`, as is the convention for decision tasks. + +Hooks can have a "schedule" indicating specific times that new tasks should +be created. Each schedule is in a simple cron format, per +https://www.npmjs.com/package/cron-parser. For example: + * `['0 0 1 * * *']` -- daily at 1:00 UTC + * `['0 0 9,21 * * 1-5', '0 0 12 * * 0,6']` -- weekdays at 9:00 and 21:00 UTC, weekends at noon + +The task definition is used as a JSON-e template, with a context depending on how it is fired. See +[/docs/reference/core/taskcluster-hooks/docs/firing-hooks](firing-hooks) +for more information. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +hooks.ping() # -> None` +# Async call +await asyncHooks.ping() # -> None +``` + +#### List hook groups +This endpoint will return a list of all hook groups with at least one hook. 
+ + +Required [output schema](v1/list-hook-groups-response.json#) + +```python +# Sync calls +hooks.listHookGroups() # -> result` +# Async call +await asyncHooks.listHookGroups() # -> result +``` + +#### List hooks in a given group +This endpoint will return a list of all the hook definitions within a +given hook group. + + + +Takes the following arguments: + + * `hookGroupId` + +Required [output schema](v1/list-hooks-response.json#) + +```python +# Sync calls +hooks.listHooks(hookGroupId) # -> result` +hooks.listHooks(hookGroupId='value') # -> result +# Async call +await asyncHooks.listHooks(hookGroupId) # -> result +await asyncHooks.listHooks(hookGroupId='value') # -> result +``` + +#### Get hook definition +This endpoint will return the hook definition for the given `hookGroupId` +and hookId. + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [output schema](v1/hook-definition.json#) + +```python +# Sync calls +hooks.hook(hookGroupId, hookId) # -> result` +hooks.hook(hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.hook(hookGroupId, hookId) # -> result +await asyncHooks.hook(hookGroupId='value', hookId='value') # -> result +``` + +#### Get hook status +This endpoint will return the current status of the hook. This represents a +snapshot in time and may vary from one call to the next. + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [output schema](v1/hook-status.json#) + +```python +# Sync calls +hooks.getHookStatus(hookGroupId, hookId) # -> result` +hooks.getHookStatus(hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.getHookStatus(hookGroupId, hookId) # -> result +await asyncHooks.getHookStatus(hookGroupId='value', hookId='value') # -> result +``` + +#### Create a hook +This endpoint will create a new hook. + +The caller's credentials must include the role that will be used to +create the task. That role must satisfy task.scopes as well as the +necessary scopes to add the task to the queue. + + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [input schema](v1/create-hook-request.json#) + +Required [output schema](v1/hook-definition.json#) + +```python +# Sync calls +hooks.createHook(hookGroupId, hookId, payload) # -> result` +hooks.createHook(payload, hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.createHook(hookGroupId, hookId, payload) # -> result +await asyncHooks.createHook(payload, hookGroupId='value', hookId='value') # -> result +``` + +#### Update a hook +This endpoint will update an existing hook. All fields except +`hookGroupId` and `hookId` can be modified. + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [input schema](v1/create-hook-request.json#) + +Required [output schema](v1/hook-definition.json#) + +```python +# Sync calls +hooks.updateHook(hookGroupId, hookId, payload) # -> result` +hooks.updateHook(payload, hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.updateHook(hookGroupId, hookId, payload) # -> result +await asyncHooks.updateHook(payload, hookGroupId='value', hookId='value') # -> result +``` + +#### Delete a hook +This endpoint will remove a hook definition. 
+ + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +```python +# Sync calls +hooks.removeHook(hookGroupId, hookId) # -> None` +hooks.removeHook(hookGroupId='value', hookId='value') # -> None +# Async call +await asyncHooks.removeHook(hookGroupId, hookId) # -> None +await asyncHooks.removeHook(hookGroupId='value', hookId='value') # -> None +``` + +#### Trigger a hook +This endpoint will trigger the creation of a task from a hook definition. + +The HTTP payload must match the hooks `triggerSchema`. If it does, it is +provided as the `payload` property of the JSON-e context used to render the +task template. + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [input schema](v1/trigger-hook.json#) + +Required [output schema](v1/task-status.json#) + +```python +# Sync calls +hooks.triggerHook(hookGroupId, hookId, payload) # -> result` +hooks.triggerHook(payload, hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.triggerHook(hookGroupId, hookId, payload) # -> result +await asyncHooks.triggerHook(payload, hookGroupId='value', hookId='value') # -> result +``` + +#### Get a trigger token +Retrieve a unique secret token for triggering the specified hook. This +token can be deactivated with `resetTriggerToken`. + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [output schema](v1/trigger-token-response.json#) + +```python +# Sync calls +hooks.getTriggerToken(hookGroupId, hookId) # -> result` +hooks.getTriggerToken(hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.getTriggerToken(hookGroupId, hookId) # -> result +await asyncHooks.getTriggerToken(hookGroupId='value', hookId='value') # -> result +``` + +#### Reset a trigger token +Reset the token for triggering a given hook. This invalidates token that +may have been issued via getTriggerToken with a new token. + + + +Takes the following arguments: + + * `hookGroupId` + * `hookId` + +Required [output schema](v1/trigger-token-response.json#) + +```python +# Sync calls +hooks.resetTriggerToken(hookGroupId, hookId) # -> result` +hooks.resetTriggerToken(hookGroupId='value', hookId='value') # -> result +# Async call +await asyncHooks.resetTriggerToken(hookGroupId, hookId) # -> result +await asyncHooks.resetTriggerToken(hookGroupId='value', hookId='value') # -> result +``` + +#### Trigger a hook with a token +This endpoint triggers a defined hook with a valid token. + +The HTTP payload must match the hooks `triggerSchema`. If it does, it is +provided as the `payload` property of the JSON-e context used to render the +task template. 
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+ * `token`
+
+Required [input schema](v1/trigger-hook.json#)
+
+Required [output schema](v1/task-status.json#)
+
+```python
+# Sync calls
+hooks.triggerHookWithToken(hookGroupId, hookId, token, payload) # -> result
+hooks.triggerHookWithToken(payload, hookGroupId='value', hookId='value', token='value') # -> result
+# Async call
+await asyncHooks.triggerHookWithToken(hookGroupId, hookId, token, payload) # -> result
+await asyncHooks.triggerHookWithToken(payload, hookGroupId='value', hookId='value', token='value') # -> result
+```
+
+
+
+
+### Methods in `taskcluster.Index`
+```python
+import asyncio # Only for async
+# Create Index client instance
+import taskcluster
+import taskcluster.aio
+
+index = taskcluster.Index(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncIndex = taskcluster.aio.Index(options, session=session)
+```
+The task index, typically available at `index.taskcluster.net`, is
+responsible for indexing tasks. The service ensures that tasks can be
+located by recency and/or arbitrary strings. Common use-cases include:
+
+ * Locate tasks by git or mercurial `<revision>`, or
+ * Locate latest task from given `<branch>`, such as a release.
+
+**Index hierarchy**, tasks are indexed in a dot (`.`) separated hierarchy
+called a namespace. For example a task could be indexed with the index path
+`some-app.<revision>.linux-64.release-build`. In this case the following
+namespaces are created.
+
+ 1. `some-app`,
+ 2. `some-app.<revision>`, and,
+ 3. `some-app.<revision>.linux-64`
+
+Inside the namespace `some-app.<revision>` you can find the namespace
+`some-app.<revision>.linux-64` inside which you can find the indexed task
+`some-app.<revision>.linux-64.release-build`. This is an example of indexing
+builds for a given platform and revision.
+
+**Task Rank**, when a task is indexed, it is assigned a `rank` (defaults
+to `0`). If another task is already indexed in the same namespace with
+lower or equal `rank`, the index for that task will be overwritten. For example
+consider index path `mozilla-central.linux-64.release-build`. In
+this case one might choose to use a UNIX timestamp or mercurial revision
+number as `rank`. This way the latest completed linux 64-bit release
+build is always available at `mozilla-central.linux-64.release-build`.
+
+Note that this does mean index paths are not immutable: the same path may
+point to a different task now than it did a moment ago.
+
+**Indexed Data**, when a task is retrieved from the index the result includes
+a `taskId` and an additional user-defined JSON blob that was indexed with
+the task.
+
+**Entry Expiration**, all indexed entries must have an expiration date.
+Typically this defaults to one year, if not specified. If you are
+indexing tasks to make it easy to find artifacts, consider using the
+artifact's expiration date.
+
+**Valid Characters**, all keys in a namespace `<key1>.<key2>` must be
+in the form `/[a-zA-Z0-9_!~*'()%-]+/`. Observe that this is URL-safe and
+that if you strictly want to put another character you can URL encode it.
+
+**Indexing Routes**, tasks can be indexed using the API below, but the
+most common way to index tasks is adding a custom route to `task.routes` of the
+form `index.<namespace>`. In order to add this route to a task you'll
+need the scope `queue:route:index.<namespace>`. When a task has
+this route, it will be indexed when the task is **completed successfully**.
+The task will be indexed with `rank`, `data` and `expires` as specified
+in `task.extra.index`. See the example below:
+
+```js
+{
+  payload: { /* ... */ },
+  routes: [
+    // index.<namespace> prefixed routes, tasks CC'ed with such a route will
+    // be indexed under the given <namespace>
+    "index.mozilla-central.linux-64.release-build",
+    "index.<revision>.linux-64.release-build"
+  ],
+  extra: {
+    // Optional details for indexing service
+    index: {
+      // Ordering, this taskId will overwrite anything that has
+      // rank <= 4000 (defaults to zero)
+      rank: 4000,
+
+      // Specify when the entries expire (Defaults to 1 year)
+      expires: new Date().toJSON(),
+
+      // A little informal data to store along with taskId
+      // (less than 16 kB when encoded as JSON)
+      data: {
+        hgRevision: "...",
+        commitMessage: "...",
+        whatever...
+      }
+    },
+    // Extra properties for other services...
+  }
+  // Other task properties...
+}
+```
+
+**Remark**, when indexing tasks using custom routes, it's also possible
+to listen for messages about these tasks. For
+example one could bind to `route.index.some-app.*.release-build`,
+and pick up all messages about release builds. Hence, it is a
+good idea to document task index hierarchies, as these make up extension
+points in their own right.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+index.ping() # -> None
+# Async call
+await asyncIndex.ping() # -> None
+```
+
+#### Find Indexed Task
+Find a task by index path, returning the highest-rank task with that path. If no
+task exists for the given path, this API end-point will respond with a 404 status.
+
+
+
+Takes the following arguments:
+
+ * `indexPath`
+
+Required [output schema](v1/indexed-task-response.json#)
+
+```python
+# Sync calls
+index.findTask(indexPath) # -> result
+index.findTask(indexPath='value') # -> result
+# Async call
+await asyncIndex.findTask(indexPath) # -> result
+await asyncIndex.findTask(indexPath='value') # -> result
+```
+
+#### List Namespaces
+List the namespaces immediately under a given namespace.
+
+This endpoint
+lists up to 1000 namespaces. If more namespaces are present, a
+`continuationToken` will be returned, which can be given in the next
+request. For the initial request, the payload should be an empty JSON
+object.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [output schema](v1/list-namespaces-response.json#)
+
+```python
+# Sync calls
+index.listNamespaces(namespace) # -> result
+index.listNamespaces(namespace='value') # -> result
+# Async call
+await asyncIndex.listNamespaces(namespace) # -> result
+await asyncIndex.listNamespaces(namespace='value') # -> result
+```
+
+#### List Tasks
+List the tasks immediately under a given namespace.
+
+This endpoint
+lists up to 1000 tasks. If more tasks are present, a
+`continuationToken` will be returned, which can be given in the next
+request. For the initial request, the payload should be an empty JSON
+object.
+
+**Remark**, this end-point is designed for humans browsing for tasks, not
+services, as that makes little sense.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [output schema](v1/list-tasks-response.json#)
+
+```python
+# Sync calls
+index.listTasks(namespace) # -> result
+index.listTasks(namespace='value') # -> result
+# Async call
+await asyncIndex.listTasks(namespace) # -> result
+await asyncIndex.listTasks(namespace='value') # -> result
+```
+
+#### Insert Task into Index
+Insert a task into the index.
If the new rank is less than the existing rank +at the given index path, the task is not indexed but the response is still 200 OK. + +Please see the introduction above for information +about indexing successfully completed tasks automatically using custom routes. + + + +Takes the following arguments: + + * `namespace` + +Required [input schema](v1/insert-task-request.json#) + +Required [output schema](v1/indexed-task-response.json#) + +```python +# Sync calls +index.insertTask(namespace, payload) # -> result` +index.insertTask(payload, namespace='value') # -> result +# Async call +await asyncIndex.insertTask(namespace, payload) # -> result +await asyncIndex.insertTask(payload, namespace='value') # -> result +``` + +#### Get Artifact From Indexed Task +Find a task by index path and redirect to the artifact on the most recent +run with the given `name`. + +Note that multiple calls to this endpoint may return artifacts from differen tasks +if a new task is inserted into the index between calls. Avoid using this method as +a stable link to multiple, connected files if the index path does not contain a +unique identifier. For example, the following two links may return unrelated files: +* https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/installer.exe` +* https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/debug-symbols.zip` + +This problem be remedied by including the revision in the index path or by bundling both +installer and debug symbols into a single artifact. + +If no task exists for the given index path, this API end-point responds with 404. + + + +Takes the following arguments: + + * `indexPath` + * `name` + +```python +# Sync calls +index.findArtifactFromTask(indexPath, name) # -> None` +index.findArtifactFromTask(indexPath='value', name='value') # -> None +# Async call +await asyncIndex.findArtifactFromTask(indexPath, name) # -> None +await asyncIndex.findArtifactFromTask(indexPath='value', name='value') # -> None +``` + + + + +### Methods in `taskcluster.Login` +```python +import asyncio # Only for async +// Create Login client instance +import taskcluster +import taskcluster.aio + +login = taskcluster.Login(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncLogin = taskcluster.aio.Login(options, session=session) +``` +The Login service serves as the interface between external authentication +systems and Taskcluster credentials. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +login.ping() # -> None` +# Async call +await asyncLogin.ping() # -> None +``` + +#### Get Taskcluster credentials given a suitable `access_token` +Given an OIDC `access_token` from a trusted OpenID provider, return a +set of Taskcluster credentials for use on behalf of the identified +user. + +This method is typically not called with a Taskcluster client library +and does not accept Hawk credentials. The `access_token` should be +given in an `Authorization` header: +``` +Authorization: Bearer abc.xyz +``` + +The `access_token` is first verified against the named +:provider, then passed to the provider's APIBuilder to retrieve a user +profile. That profile is then used to generate Taskcluster credentials +appropriate to the user. Note that the resulting credentials may or may +not include a `certificate` property. 
Callers should be prepared for either +alternative. + +The given credentials will expire in a relatively short time. Callers should +monitor this expiration and refresh the credentials if necessary, by calling +this endpoint again, if they have expired. + + + +Takes the following arguments: + + * `provider` + +Required [output schema](v1/oidc-credentials-response.json#) + +```python +# Sync calls +login.oidcCredentials(provider) # -> result` +login.oidcCredentials(provider='value') # -> result +# Async call +await asyncLogin.oidcCredentials(provider) # -> result +await asyncLogin.oidcCredentials(provider='value') # -> result +``` + + + + +### Methods in `taskcluster.Notify` +```python +import asyncio # Only for async +// Create Notify client instance +import taskcluster +import taskcluster.aio + +notify = taskcluster.Notify(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncNotify = taskcluster.aio.Notify(options, session=session) +``` +The notification service, typically available at `notify.taskcluster.net` +listens for tasks with associated notifications and handles requests to +send emails and post pulse messages. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +notify.ping() # -> None` +# Async call +await asyncNotify.ping() # -> None +``` + +#### Send an Email +Send an email to `address`. The content is markdown and will be rendered +to HTML, but both the HTML and raw markdown text will be sent in the +email. If a link is included, it will be rendered to a nice button in the +HTML version of the email + + +Required [input schema](v1/email-request.json#) + +```python +# Sync calls +notify.email(payload) # -> None` +# Async call +await asyncNotify.email(payload) # -> None +``` + +#### Publish a Pulse Message +Publish a message on pulse with the given `routingKey`. + + +Required [input schema](v1/pulse-request.json#) + +```python +# Sync calls +notify.pulse(payload) # -> None` +# Async call +await asyncNotify.pulse(payload) # -> None +``` + +#### Post IRC Message +Post a message on IRC to a specific channel or user, or a specific user +on a specific channel. + +Success of this API method does not imply the message was successfully +posted. This API method merely inserts the IRC message into a queue +that will be processed by a background process. +This allows us to re-send the message in face of connection issues. + +However, if the user isn't online the message will be dropped without +error. We maybe improve this behavior in the future. For now just keep +in mind that IRC is a best-effort service. + + +Required [input schema](v1/irc-request.json#) + +```python +# Sync calls +notify.irc(payload) # -> None` +# Async call +await asyncNotify.irc(payload) # -> None +``` + + + + +### Methods in `taskcluster.Pulse` +```python +import asyncio # Only for async +// Create Pulse client instance +import taskcluster +import taskcluster.aio + +pulse = taskcluster.Pulse(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncPulse = taskcluster.aio.Pulse(options, session=session) +``` +The taskcluster-pulse service, typically available at `pulse.taskcluster.net` +manages pulse credentials for taskcluster users. + +A service to manage Pulse credentials for anything using +Taskcluster credentials. 
This allows for self-service pulse +access and greater control within the Taskcluster project. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +pulse.ping() # -> None` +# Async call +await asyncPulse.ping() # -> None +``` + +#### List Namespaces +List the namespaces managed by this service. + +This will list up to 1000 namespaces. If more namespaces are present a +`continuationToken` will be returned, which can be given in the next +request. For the initial request, do not provide continuation token. + + +Required [output schema](v1/list-namespaces-response.json#) + +```python +# Sync calls +pulse.listNamespaces() # -> result` +# Async call +await asyncPulse.listNamespaces() # -> result +``` + +#### Get a namespace +Get public information about a single namespace. This is the same information +as returned by `listNamespaces`. + + + +Takes the following arguments: + + * `namespace` + +Required [output schema](v1/namespace.json#) + +```python +# Sync calls +pulse.namespace(namespace) # -> result` +pulse.namespace(namespace='value') # -> result +# Async call +await asyncPulse.namespace(namespace) # -> result +await asyncPulse.namespace(namespace='value') # -> result +``` + +#### Claim a namespace +Claim a namespace, returning a connection string with access to that namespace +good for use until the `reclaimAt` time in the response body. The connection +string can be used as many times as desired during this period, but must not +be used after `reclaimAt`. + +Connections made with this connection string may persist beyond `reclaimAt`, +although it should not persist forever. 24 hours is a good maximum, and this +service will terminate connections after 72 hours (although this value is +configurable). + +The specified `expires` time updates any existing expiration times. Connections +for expired namespaces will be terminated. + + + +Takes the following arguments: + + * `namespace` + +Required [input schema](v1/namespace-request.json#) + +Required [output schema](v1/namespace-response.json#) + +```python +# Sync calls +pulse.claimNamespace(namespace, payload) # -> result` +pulse.claimNamespace(payload, namespace='value') # -> result +# Async call +await asyncPulse.claimNamespace(namespace, payload) # -> result +await asyncPulse.claimNamespace(payload, namespace='value') # -> result +``` + + + + +### Methods in `taskcluster.PurgeCache` +```python +import asyncio # Only for async +// Create PurgeCache client instance +import taskcluster +import taskcluster.aio + +purgeCache = taskcluster.PurgeCache(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncPurgeCache = taskcluster.aio.PurgeCache(options, session=session) +``` +The purge-cache service is responsible for publishing a pulse +message for workers, so they can purge cache upon request. + +This document describes the API end-point for publishing the pulse +message. This is mainly intended to be used by tools. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +purgeCache.ping() # -> None` +# Async call +await asyncPurgeCache.ping() # -> None +``` + +#### Purge Worker Cache +Publish a purge-cache message to purge caches named `cacheName` with +`provisionerId` and `workerType` in the routing-key. 
Workers should
+be listening for this message and purge caches when they see it.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [input schema](v1/purge-cache-request.json#)
+
+```python
+# Sync calls
+purgeCache.purgeCache(provisionerId, workerType, payload) # -> None
+purgeCache.purgeCache(payload, provisionerId='value', workerType='value') # -> None
+# Async call
+await asyncPurgeCache.purgeCache(provisionerId, workerType, payload) # -> None
+await asyncPurgeCache.purgeCache(payload, provisionerId='value', workerType='value') # -> None
+```
+
+#### All Open Purge Requests
+This is useful mostly for administrators to view
+the set of open purge requests. It should not
+be used by workers. They should use the purgeRequests
+endpoint that is specific to their workerType and
+provisionerId.
+
+
+Required [output schema](v1/all-purge-cache-request-list.json#)
+
+```python
+# Sync calls
+purgeCache.allPurgeRequests() # -> result
+# Async call
+await asyncPurgeCache.allPurgeRequests() # -> result
+```
+
+#### Open Purge Requests for a provisionerId/workerType pair
+List of caches that need to be purged if they are from before
+a certain time. This is safe to use in automation from
+workers.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [output schema](v1/purge-cache-request-list.json#)
+
+```python
+# Sync calls
+purgeCache.purgeRequests(provisionerId, workerType) # -> result
+purgeCache.purgeRequests(provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncPurgeCache.purgeRequests(provisionerId, workerType) # -> result
+await asyncPurgeCache.purgeRequests(provisionerId='value', workerType='value') # -> result
+```
+
+
+
+
+### Exchanges in `taskcluster.PurgeCacheEvents`
+```python
+# Create PurgeCacheEvents client instance
+import taskcluster
+purgeCacheEvents = taskcluster.PurgeCacheEvents(options)
+```
+The purge-cache service, typically available at
+`purge-cache.taskcluster.net`, is responsible for publishing a pulse
+message for workers, so they can purge cache upon request.
+
+This document describes the exchange offered for workers by the
+cache-purge service.
+#### Purge Cache Messages
+ * `purgeCacheEvents.purgeCache(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `provisionerId` is required Description: `provisionerId` under which to purge cache.
+ * `workerType` is required Description: `workerType` for which to purge cache.
+
+
+
+
+### Methods in `taskcluster.Queue`
+```python
+import asyncio # Only for async
+# Create Queue client instance
+import taskcluster
+import taskcluster.aio
+
+queue = taskcluster.Queue(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncQueue = taskcluster.aio.Queue(options, session=session)
+```
+The queue, typically available at `queue.taskcluster.net`, is responsible
+for accepting tasks and tracking their state as they are executed by
+workers, in order to ensure they are eventually resolved.
+
+This document describes the API end-points offered by the queue. These
+end-points target the following audiences:
+ * Schedulers, who create tasks to be executed,
+ * Workers, who execute tasks, and
+ * Tools that want to inspect the state of a task.
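+
+Before the individual end-points, here is a brief, non-normative sketch of how
+the sync client created above might be combined with two of the methods
+documented later in this section (`status` and `pendingTasks`). The taskId and
+the provisioner/worker-type names are placeholders; the response fields follow
+the output schemas referenced with each method.
+
+```python
+# Illustrative only: reuses the `queue` client created in the snippet above.
+task_id = '...'  # placeholder: a real 22-character taskId slug
+
+# "Get task status" (documented below) returns the task-status structure;
+# its `status.state` field is e.g. 'pending', 'running' or 'completed'.
+state = queue.status(task_id)['status']['state']
+
+# "Get Number of Pending Tasks" (documented below) gives an approximate count.
+pending = queue.pendingTasks('some-provisioner', 'some-worker-type')['pendingTasks']
+print(state, pending)
+```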
+#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +queue.ping() # -> None` +# Async call +await asyncQueue.ping() # -> None +``` + +#### Get Task Definition +This end-point will return the task-definition. Notice that the task +definition may have been modified by queue, if an optional property is +not specified the queue may provide a default value. + + + +Takes the following arguments: + + * `taskId` + +Required [output schema](v1/task.json#) + +```python +# Sync calls +queue.task(taskId) # -> result` +queue.task(taskId='value') # -> result +# Async call +await asyncQueue.task(taskId) # -> result +await asyncQueue.task(taskId='value') # -> result +``` + +#### Get task status +Get task status structure from `taskId` + + + +Takes the following arguments: + + * `taskId` + +Required [output schema](v1/task-status-response.json#) + +```python +# Sync calls +queue.status(taskId) # -> result` +queue.status(taskId='value') # -> result +# Async call +await asyncQueue.status(taskId) # -> result +await asyncQueue.status(taskId='value') # -> result +``` + +#### List Task Group +List tasks sharing the same `taskGroupId`. + +As a task-group may contain an unbounded number of tasks, this end-point +may return a `continuationToken`. To continue listing tasks you must call +the `listTaskGroup` again with the `continuationToken` as the +query-string option `continuationToken`. + +By default this end-point will try to return up to 1000 members in one +request. But it **may return less**, even if more tasks are available. +It may also return a `continuationToken` even though there are no more +results. However, you can only be sure to have seen all results if you +keep calling `listTaskGroup` with the last `continuationToken` until you +get a result without a `continuationToken`. + +If you are not interested in listing all the members at once, you may +use the query-string option `limit` to return fewer. + + + +Takes the following arguments: + + * `taskGroupId` + +Required [output schema](v1/list-task-group-response.json#) + +```python +# Sync calls +queue.listTaskGroup(taskGroupId) # -> result` +queue.listTaskGroup(taskGroupId='value') # -> result +# Async call +await asyncQueue.listTaskGroup(taskGroupId) # -> result +await asyncQueue.listTaskGroup(taskGroupId='value') # -> result +``` + +#### List Dependent Tasks +List tasks that depend on the given `taskId`. + +As many tasks from different task-groups may dependent on a single tasks, +this end-point may return a `continuationToken`. To continue listing +tasks you must call `listDependentTasks` again with the +`continuationToken` as the query-string option `continuationToken`. + +By default this end-point will try to return up to 1000 tasks in one +request. But it **may return less**, even if more tasks are available. +It may also return a `continuationToken` even though there are no more +results. However, you can only be sure to have seen all results if you +keep calling `listDependentTasks` with the last `continuationToken` until +you get a result without a `continuationToken`. + +If you are not interested in listing all the tasks at once, you may +use the query-string option `limit` to return fewer. 
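+
+The continuation-token flow described above can be driven with a small loop.
+The following is an illustrative sketch only, not part of the generated
+reference: it assumes a `queue` client created as shown earlier in this section
+and that query-string options such as `continuationToken` are passed through
+the `query` keyword argument accepted by the generated methods.
+
+```python
+def all_dependent_tasks(queue, task_id):
+    """Collect every page of listDependentTasks for the given taskId."""
+    tasks = []
+    outcome = queue.listDependentTasks(task_id)
+    tasks.extend(outcome.get('tasks', []))
+    # Keep following continuationToken until no further token is returned.
+    while outcome.get('continuationToken'):
+        outcome = queue.listDependentTasks(
+            task_id, query={'continuationToken': outcome['continuationToken']})
+        tasks.extend(outcome.get('tasks', []))
+    return tasks
+```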
+ + + +Takes the following arguments: + + * `taskId` + +Required [output schema](v1/list-dependent-tasks-response.json#) + +```python +# Sync calls +queue.listDependentTasks(taskId) # -> result` +queue.listDependentTasks(taskId='value') # -> result +# Async call +await asyncQueue.listDependentTasks(taskId) # -> result +await asyncQueue.listDependentTasks(taskId='value') # -> result +``` + +#### Create New Task +Create a new task, this is an **idempotent** operation, so repeat it if +you get an internal server error or network connection is dropped. + +**Task `deadline`**: the deadline property can be no more than 5 days +into the future. This is to limit the amount of pending tasks not being +taken care of. Ideally, you should use a much shorter deadline. + +**Task expiration**: the `expires` property must be greater than the +task `deadline`. If not provided it will default to `deadline` + one +year. Notice, that artifacts created by task must expire before the task. + +**Task specific routing-keys**: using the `task.routes` property you may +define task specific routing-keys. If a task has a task specific +routing-key: ``, then when the AMQP message about the task is +published, the message will be CC'ed with the routing-key: +`route.`. This is useful if you want another component to listen +for completed tasks you have posted. The caller must have scope +`queue:route:` for each route. + +**Dependencies**: any tasks referenced in `task.dependencies` must have +already been created at the time of this call. + +**Scopes**: Note that the scopes required to complete this API call depend +on the content of the `scopes`, `routes`, `schedulerId`, `priority`, +`provisionerId`, and `workerType` properties of the task definition. + +**Legacy Scopes**: The `queue:create-task:..` scope without a priority and +the `queue:define-task:..` and `queue:task-group-id:..` scopes are considered +legacy and should not be used. Note that the new, non-legacy scopes require +a `queue:scheduler-id:..` scope as well as scopes for the proper priority. + + + +Takes the following arguments: + + * `taskId` + +Required [input schema](v1/create-task-request.json#) + +Required [output schema](v1/task-status-response.json#) + +```python +# Sync calls +queue.createTask(taskId, payload) # -> result` +queue.createTask(payload, taskId='value') # -> result +# Async call +await asyncQueue.createTask(taskId, payload) # -> result +await asyncQueue.createTask(payload, taskId='value') # -> result +``` + +#### Define Task +**Deprecated**, this is the same as `createTask` with a **self-dependency**. +This is only present for legacy. + + + +Takes the following arguments: + + * `taskId` + +Required [input schema](v1/create-task-request.json#) + +Required [output schema](v1/task-status-response.json#) + +```python +# Sync calls +queue.defineTask(taskId, payload) # -> result` +queue.defineTask(payload, taskId='value') # -> result +# Async call +await asyncQueue.defineTask(taskId, payload) # -> result +await asyncQueue.defineTask(payload, taskId='value') # -> result +``` + +#### Schedule Defined Task +scheduleTask will schedule a task to be executed, even if it has +unresolved dependencies. A task would otherwise only be scheduled if +its dependencies were resolved. + +This is useful if you have defined a task that depends on itself or on +some other task that has not been resolved, but you wish the task to be +scheduled immediately. + +This will announce the task as pending and workers will be allowed to +claim it and resolve the task. 
+
+**Note** this operation is **idempotent** and will not fail or complain
+if called with a `taskId` that is already scheduled, or even resolved.
+To reschedule a task previously resolved, use `rerunTask`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.scheduleTask(taskId) # -> result
+queue.scheduleTask(taskId='value') # -> result
+# Async call
+await asyncQueue.scheduleTask(taskId) # -> result
+await asyncQueue.scheduleTask(taskId='value') # -> result
+```
+
+#### Rerun a Resolved Task
+This method _reruns_ a previously resolved task, even if it was
+_completed_. This is useful if your task completes unsuccessfully, and
+you just want to run it from scratch again. This will also reset the
+number of `retries` allowed.
+
+Remember that `retries` in the task status counts the number of runs that
+the queue has started because the worker stopped responding, for example
+because a spot node died.
+
+**Remark** this operation is idempotent; if you try to rerun a task that
+is not either `failed` or `completed`, this operation will just return
+the current task status.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.rerunTask(taskId) # -> result
+queue.rerunTask(taskId='value') # -> result
+# Async call
+await asyncQueue.rerunTask(taskId) # -> result
+await asyncQueue.rerunTask(taskId='value') # -> result
+```
+
+#### Cancel Task
+This method will cancel a task that is either `unscheduled`, `pending` or
+`running`. It will resolve the current run as `exception` with
+`reasonResolved` set to `canceled`. If the task isn't scheduled yet, i.e.
+it doesn't have any runs, an initial run will be added and resolved as
+described above. Hence, after canceling a task, it cannot be scheduled
+with `queue.scheduleTask`, but a new run can be created with
+`queue.rerunTask`. These semantics are equivalent to calling
+`queue.scheduleTask` immediately followed by `queue.cancelTask`.
+
+**Remark** this operation is idempotent; if you try to cancel a task that
+isn't `unscheduled`, `pending` or `running`, this operation will just
+return the current task status.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.cancelTask(taskId) # -> result
+queue.cancelTask(taskId='value') # -> result
+# Async call
+await asyncQueue.cancelTask(taskId) # -> result
+await asyncQueue.cancelTask(taskId='value') # -> result
+```
+
+#### Claim Work
+Claim pending task(s) for the given `provisionerId`/`workerType` queue.
+
+If any work is available (even if fewer than the requested number of
+tasks), this will return immediately. Otherwise, it will block for tens of
+seconds waiting for work. If no work appears, it will return an empty
+list of tasks. Callers should sleep a short while (to avoid denial of
+service in an error condition) and call the endpoint again. This is a
+simple implementation of "long polling".
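+
+As a rough illustration of the long-polling pattern described above (and not
+part of the generated reference), a worker's claim loop might look like the
+sketch below. The provisioner, worker-type and worker identifiers are
+placeholders, the request body follows the claim-work input schema referenced
+below, the 30-second back-off is arbitrary, and `run_task` stands in for
+whatever actually executes and resolves the claimed tasks.
+
+```python
+import time
+
+def claim_loop(queue, run_task):
+    while True:
+        # Ask for up to 4 pending tasks; the call may block for tens of
+        # seconds and may return fewer tasks than requested.
+        result = queue.claimWork('some-provisioner', 'some-worker-type', {
+            'workerGroup': 'some-worker-group',
+            'workerId': 'worker-1',
+            'tasks': 4,
+        })
+        if not result.get('tasks'):
+            # No work appeared: back off briefly before polling again.
+            time.sleep(30)
+            continue
+        for claim in result['tasks']:
+            run_task(claim)
+```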
+ + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + +Required [input schema](v1/claim-work-request.json#) + +Required [output schema](v1/claim-work-response.json#) + +```python +# Sync calls +queue.claimWork(provisionerId, workerType, payload) # -> result` +queue.claimWork(payload, provisionerId='value', workerType='value') # -> result +# Async call +await asyncQueue.claimWork(provisionerId, workerType, payload) # -> result +await asyncQueue.claimWork(payload, provisionerId='value', workerType='value') # -> result +``` + +#### Claim Task +claim a task - never documented + + + +Takes the following arguments: + + * `taskId` + * `runId` + +Required [input schema](v1/task-claim-request.json#) + +Required [output schema](v1/task-claim-response.json#) + +```python +# Sync calls +queue.claimTask(taskId, runId, payload) # -> result` +queue.claimTask(payload, taskId='value', runId='value') # -> result +# Async call +await asyncQueue.claimTask(taskId, runId, payload) # -> result +await asyncQueue.claimTask(payload, taskId='value', runId='value') # -> result +``` + +#### Reclaim task +Refresh the claim for a specific `runId` for given `taskId`. This updates +the `takenUntil` property and returns a new set of temporary credentials +for performing requests on behalf of the task. These credentials should +be used in-place of the credentials returned by `claimWork`. + +The `reclaimTask` requests serves to: + * Postpone `takenUntil` preventing the queue from resolving + `claim-expired`, + * Refresh temporary credentials used for processing the task, and + * Abort execution if the task/run have been resolved. + +If the `takenUntil` timestamp is exceeded the queue will resolve the run +as _exception_ with reason `claim-expired`, and proceeded to retry to the +task. This ensures that tasks are retried, even if workers disappear +without warning. + +If the task is resolved, this end-point will return `409` reporting +`RequestConflict`. This typically happens if the task have been canceled +or the `task.deadline` have been exceeded. If reclaiming fails, workers +should abort the task and forget about the given `runId`. There is no +need to resolve the run or upload artifacts. + + + +Takes the following arguments: + + * `taskId` + * `runId` + +Required [output schema](v1/task-reclaim-response.json#) + +```python +# Sync calls +queue.reclaimTask(taskId, runId) # -> result` +queue.reclaimTask(taskId='value', runId='value') # -> result +# Async call +await asyncQueue.reclaimTask(taskId, runId) # -> result +await asyncQueue.reclaimTask(taskId='value', runId='value') # -> result +``` + +#### Report Run Completed +Report a task completed, resolving the run as `completed`. + + + +Takes the following arguments: + + * `taskId` + * `runId` + +Required [output schema](v1/task-status-response.json#) + +```python +# Sync calls +queue.reportCompleted(taskId, runId) # -> result` +queue.reportCompleted(taskId='value', runId='value') # -> result +# Async call +await asyncQueue.reportCompleted(taskId, runId) # -> result +await asyncQueue.reportCompleted(taskId='value', runId='value') # -> result +``` + +#### Report Run Failed +Report a run failed, resolving the run as `failed`. Use this to resolve +a run that failed because the task specific code behaved unexpectedly. +For example the task exited non-zero, or didn't produce expected output. + +Do not use this if the task couldn't be run because if malformed +payload, or other unexpected condition. 
In these cases we have a task
+exception, which should be reported with `reportException`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.reportFailed(taskId, runId) # -> result
+queue.reportFailed(taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.reportFailed(taskId, runId) # -> result
+await asyncQueue.reportFailed(taskId='value', runId='value') # -> result
+```
+
+#### Report Task Exception
+Resolve a run as _exception_. Generally, you will want to report tasks as
+failed instead of exception. You should `reportException` if,
+
+ * The `task.payload` is invalid,
+ * Non-existent resources are referenced,
+ * Declared actions cannot be executed due to unavailable resources,
+ * The worker had to shut down prematurely,
+ * The worker experienced an unknown error, or,
+ * The task explicitly requested a retry.
+
+Do not use this to signal that some user-specified code crashed for any
+reason specific to this code. If user-specific code hits a resource that
+is temporarily unavailable, the worker should report the task _failed_.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [input schema](v1/task-exception-request.json#)
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.reportException(taskId, runId, payload) # -> result
+queue.reportException(payload, taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.reportException(taskId, runId, payload) # -> result
+await asyncQueue.reportException(payload, taskId='value', runId='value') # -> result
+```
+
+#### Create Artifact
+This API end-point creates an artifact for a specific run of a task. This
+should **only** be used by a worker currently operating on this task, or
+from a process running within the task (i.e. on the worker).
+
+All artifacts must specify when they `expires`; the queue will
+automatically take care of deleting artifacts past their
+expiration point. This feature makes it feasible to upload large
+intermediate artifacts from data processing applications, as the
+artifacts can be set to expire a few days later.
+
+We currently support 3 different `storageType`s; each storage type has
+slightly different features and in some cases different semantics.
+We also have 2 deprecated `storageType`s which are only maintained for
+backwards compatibility and should not be used in new implementations.
+
+**Blob artifacts**, are useful for storing large files. Currently, these
+are all stored in S3 but there are facilities for adding support for other
+backends in the future. A call for this type of artifact must provide information
+about the file which will be uploaded. This includes sha256 sums and sizes.
+This method will return a list of general form HTTP requests which are signed
+by AWS S3 credentials managed by the Queue. Once these requests are completed
+the list of `ETag` values returned by the requests must be passed to the
+queue `completeArtifact` method.
+
+**S3 artifacts**, DEPRECATED, are useful for static files which will be
+stored on S3. When creating an S3 artifact the queue will return a
+pre-signed URL to which you can do a `PUT` request to upload your
+artifact. Note that the `PUT` request **must** specify the `content-length`
+header and **must** give the `content-type` header the same value as in
+the request to `createArtifact`.
+ +**Azure artifacts**, DEPRECATED are stored in _Azure Blob Storage_ service +which given the consistency guarantees and API interface offered by Azure +is more suitable for artifacts that will be modified during the execution +of the task. For example docker-worker has a feature that persists the +task log to Azure Blob Storage every few seconds creating a somewhat +live log. A request to create an Azure artifact will return a URL +featuring a [Shared-Access-Signature](http://msdn.microsoft.com/en-us/library/azure/dn140256.aspx), +refer to MSDN for further information on how to use these. +**Warning: azure artifact is currently an experimental feature subject +to changes and data-drops.** + +**Reference artifacts**, only consists of meta-data which the queue will +store for you. These artifacts really only have a `url` property and +when the artifact is requested the client will be redirect the URL +provided with a `303` (See Other) redirect. Please note that we cannot +delete artifacts you upload to other service, we can only delete the +reference to the artifact, when it expires. + +**Error artifacts**, only consists of meta-data which the queue will +store for you. These artifacts are only meant to indicate that you the +worker or the task failed to generate a specific artifact, that you +would otherwise have uploaded. For example docker-worker will upload an +error artifact, if the file it was supposed to upload doesn't exists or +turns out to be a directory. Clients requesting an error artifact will +get a `424` (Failed Dependency) response. This is mainly designed to +ensure that dependent tasks can distinguish between artifacts that were +suppose to be generated and artifacts for which the name is misspelled. + +**Artifact immutability**, generally speaking you cannot overwrite an +artifact when created. But if you repeat the request with the same +properties the request will succeed as the operation is idempotent. +This is useful if you need to refresh a signed URL while uploading. +Do not abuse this to overwrite artifacts created by another entity! +Such as worker-host overwriting artifact created by worker-code. + +As a special case the `url` property on _reference artifacts_ can be +updated. You should only use this to update the `url` property for +reference artifacts your process has created. + + + +Takes the following arguments: + + * `taskId` + * `runId` + * `name` + +Required [input schema](v1/post-artifact-request.json#) + +Required [output schema](v1/post-artifact-response.json#) + +```python +# Sync calls +queue.createArtifact(taskId, runId, name, payload) # -> result` +queue.createArtifact(payload, taskId='value', runId='value', name='value') # -> result +# Async call +await asyncQueue.createArtifact(taskId, runId, name, payload) # -> result +await asyncQueue.createArtifact(payload, taskId='value', runId='value', name='value') # -> result +``` + +#### Complete Artifact +This endpoint finalises an upload done through the blob `storageType`. +The queue will ensure that the task/run is still allowing artifacts +to be uploaded. For single-part S3 blob artifacts, this endpoint +will simply ensure the artifact is present in S3. For multipart S3 +artifacts, the endpoint will perform the commit step of the multipart +upload flow. As the final step for both multi and single part artifacts, +the `present` entity field will be set to `true` to reflect that the +artifact is now present and a message published to pulse. 
NOTE: This +endpoint *must* be called for all artifacts of storageType 'blob' + + + +Takes the following arguments: + + * `taskId` + * `runId` + * `name` + +Required [input schema](v1/put-artifact-request.json#) + +```python +# Sync calls +queue.completeArtifact(taskId, runId, name, payload) # -> None` +queue.completeArtifact(payload, taskId='value', runId='value', name='value') # -> None +# Async call +await asyncQueue.completeArtifact(taskId, runId, name, payload) # -> None +await asyncQueue.completeArtifact(payload, taskId='value', runId='value', name='value') # -> None +``` + +#### Get Artifact from Run +Get artifact by `` from a specific run. + +**Public Artifacts**, in-order to get an artifact you need the scope +`queue:get-artifact:`, where `` is the name of the artifact. +But if the artifact `name` starts with `public/`, authentication and +authorization is not necessary to fetch the artifact. + +**API Clients**, this method will redirect you to the artifact, if it is +stored externally. Either way, the response may not be JSON. So API +client users might want to generate a signed URL for this end-point and +use that URL with an HTTP client that can handle responses correctly. + +**Downloading artifacts** +There are some special considerations for those http clients which download +artifacts. This api endpoint is designed to be compatible with an HTTP 1.1 +compliant client, but has extra features to ensure the download is valid. +It is strongly recommend that consumers use either taskcluster-lib-artifact (JS), +taskcluster-lib-artifact-go (Go) or the CLI written in Go to interact with +artifacts. + +In order to download an artifact the following must be done: + +1. Obtain queue url. Building a signed url with a taskcluster client is +recommended +1. Make a GET request which does not follow redirects +1. In all cases, if specified, the +x-taskcluster-location-{content,transfer}-{sha256,length} values must be +validated to be equal to the Content-Length and Sha256 checksum of the +final artifact downloaded. as well as any intermediate redirects +1. If this response is a 500-series error, retry using an exponential +backoff. No more than 5 retries should be attempted +1. If this response is a 400-series error, treat it appropriately for +your context. This might be an error in responding to this request or +an Error storage type body. This request should not be retried. +1. If this response is a 200-series response, the response body is the artifact. +If the x-taskcluster-location-{content,transfer}-{sha256,length} and +x-taskcluster-location-content-encoding are specified, they should match +this response body +1. If the response type is a 300-series redirect, the artifact will be at the +location specified by the `Location` header. There are multiple artifact storage +types which use a 300-series redirect. +1. For all redirects followed, the user must verify that the content-sha256, content-length, +transfer-sha256, transfer-length and content-encoding match every further request. The final +artifact must also be validated against the values specified in the original queue response +1. Caching of requests with an x-taskcluster-artifact-storage-type value of `reference` +must not occur +1. 
A request which has x-taskcluster-artifact-storage-type value of `blob` and does not +have x-taskcluster-location-content-sha256 or x-taskcluster-location-content-length +must be treated as an error + +**Headers** +The following important headers are set on the response to this method: + +* location: the url of the artifact if a redirect is to be performed +* x-taskcluster-artifact-storage-type: the storage type. Example: blob, s3, error + +The following important headers are set on responses to this method for Blob artifacts + +* x-taskcluster-location-content-sha256: the SHA256 of the artifact +*after* any content-encoding is undone. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64}) +* x-taskcluster-location-content-length: the number of bytes *after* any content-encoding +is undone +* x-taskcluster-location-transfer-sha256: the SHA256 of the artifact +*before* any content-encoding is undone. This is the SHA256 of what is sent over +the wire. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64}) +* x-taskcluster-location-transfer-length: the number of bytes *after* any content-encoding +is undone +* x-taskcluster-location-content-encoding: the content-encoding used. It will either +be `gzip` or `identity` right now. This is hardcoded to a value set when the artifact +was created and no content-negotiation occurs +* x-taskcluster-location-content-type: the content-type of the artifact + +**Caching**, artifacts may be cached in data centers closer to the +workers in-order to reduce bandwidth costs. This can lead to longer +response times. Caching can be skipped by setting the header +`x-taskcluster-skip-cache: true`, this should only be used for resources +where request volume is known to be low, and caching not useful. +(This feature may be disabled in the future, use is sparingly!) + + + +Takes the following arguments: + + * `taskId` + * `runId` + * `name` + +```python +# Sync calls +queue.getArtifact(taskId, runId, name) # -> None` +queue.getArtifact(taskId='value', runId='value', name='value') # -> None +# Async call +await asyncQueue.getArtifact(taskId, runId, name) # -> None +await asyncQueue.getArtifact(taskId='value', runId='value', name='value') # -> None +``` + +#### Get Artifact from Latest Run +Get artifact by `` from the last run of a task. + +**Public Artifacts**, in-order to get an artifact you need the scope +`queue:get-artifact:`, where `` is the name of the artifact. +But if the artifact `name` starts with `public/`, authentication and +authorization is not necessary to fetch the artifact. + +**API Clients**, this method will redirect you to the artifact, if it is +stored externally. Either way, the response may not be JSON. So API +client users might want to generate a signed URL for this end-point and +use that URL with a normal HTTP client. + +**Remark**, this end-point is slightly slower than +`queue.getArtifact`, so consider that if you already know the `runId` of +the latest run. Otherwise, just us the most convenient API end-point. + + + +Takes the following arguments: + + * `taskId` + * `name` + +```python +# Sync calls +queue.getLatestArtifact(taskId, name) # -> None` +queue.getLatestArtifact(taskId='value', name='value') # -> None +# Async call +await asyncQueue.getLatestArtifact(taskId, name) # -> None +await asyncQueue.getLatestArtifact(taskId='value', name='value') # -> None +``` + +#### Get Artifacts from Run +Returns a list of artifacts and associated meta-data for a given run. + +As a task may have many artifacts paging may be necessary. 
If this +end-point returns a `continuationToken`, you should call the end-point +again with the `continuationToken` as the query-string option: +`continuationToken`. + +By default this end-point will list up-to 1000 artifacts in a single page +you may limit this with the query-string parameter `limit`. + + + +Takes the following arguments: + + * `taskId` + * `runId` + +Required [output schema](v1/list-artifacts-response.json#) + +```python +# Sync calls +queue.listArtifacts(taskId, runId) # -> result` +queue.listArtifacts(taskId='value', runId='value') # -> result +# Async call +await asyncQueue.listArtifacts(taskId, runId) # -> result +await asyncQueue.listArtifacts(taskId='value', runId='value') # -> result +``` + +#### Get Artifacts from Latest Run +Returns a list of artifacts and associated meta-data for the latest run +from the given task. + +As a task may have many artifacts paging may be necessary. If this +end-point returns a `continuationToken`, you should call the end-point +again with the `continuationToken` as the query-string option: +`continuationToken`. + +By default this end-point will list up-to 1000 artifacts in a single page +you may limit this with the query-string parameter `limit`. + + + +Takes the following arguments: + + * `taskId` + +Required [output schema](v1/list-artifacts-response.json#) + +```python +# Sync calls +queue.listLatestArtifacts(taskId) # -> result` +queue.listLatestArtifacts(taskId='value') # -> result +# Async call +await asyncQueue.listLatestArtifacts(taskId) # -> result +await asyncQueue.listLatestArtifacts(taskId='value') # -> result +``` + +#### Get a list of all active provisioners +Get all active provisioners. + +The term "provisioner" is taken broadly to mean anything with a provisionerId. +This does not necessarily mean there is an associated service performing any +provisioning activity. + +The response is paged. If this end-point returns a `continuationToken`, you +should call the end-point again with the `continuationToken` as a query-string +option. By default this end-point will list up to 1000 provisioners in a single +page. You may limit this with the query-string parameter `limit`. + + +Required [output schema](v1/list-provisioners-response.json#) + +```python +# Sync calls +queue.listProvisioners() # -> result` +# Async call +await asyncQueue.listProvisioners() # -> result +``` + +#### Get an active provisioner +Get an active provisioner. + +The term "provisioner" is taken broadly to mean anything with a provisionerId. +This does not necessarily mean there is an associated service performing any +provisioning activity. + + + +Takes the following arguments: + + * `provisionerId` + +Required [output schema](v1/provisioner-response.json#) + +```python +# Sync calls +queue.getProvisioner(provisionerId) # -> result` +queue.getProvisioner(provisionerId='value') # -> result +# Async call +await asyncQueue.getProvisioner(provisionerId) # -> result +await asyncQueue.getProvisioner(provisionerId='value') # -> result +``` + +#### Update a provisioner +Declare a provisioner, supplying some details about it. + +`declareProvisioner` allows updating one or more properties of a provisioner as long as the required scopes are +possessed. For example, a request to update the `aws-provisioner-v1` +provisioner with a body `{description: 'This provisioner is great'}` would require you to have the scope +`queue:declare-provisioner:aws-provisioner-v1#description`. + +The term "provisioner" is taken broadly to mean anything with a provisionerId. 
+This does not necessarily mean there is an associated service performing any +provisioning activity. + + + +Takes the following arguments: + + * `provisionerId` + +Required [input schema](v1/update-provisioner-request.json#) + +Required [output schema](v1/provisioner-response.json#) + +```python +# Sync calls +queue.declareProvisioner(provisionerId, payload) # -> result` +queue.declareProvisioner(payload, provisionerId='value') # -> result +# Async call +await asyncQueue.declareProvisioner(provisionerId, payload) # -> result +await asyncQueue.declareProvisioner(payload, provisionerId='value') # -> result +``` + +#### Get Number of Pending Tasks +Get an approximate number of pending tasks for the given `provisionerId` +and `workerType`. + +The underlying Azure Storage Queues only promises to give us an estimate. +Furthermore, we cache the result in memory for 20 seconds. So consumers +should be no means expect this to be an accurate number. +It is, however, a solid estimate of the number of pending tasks. + + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + +Required [output schema](v1/pending-tasks-response.json#) + +```python +# Sync calls +queue.pendingTasks(provisionerId, workerType) # -> result` +queue.pendingTasks(provisionerId='value', workerType='value') # -> result +# Async call +await asyncQueue.pendingTasks(provisionerId, workerType) # -> result +await asyncQueue.pendingTasks(provisionerId='value', workerType='value') # -> result +``` + +#### Get a list of all active worker-types +Get all active worker-types for the given provisioner. + +The response is paged. If this end-point returns a `continuationToken`, you +should call the end-point again with the `continuationToken` as a query-string +option. By default this end-point will list up to 1000 worker-types in a single +page. You may limit this with the query-string parameter `limit`. + + + +Takes the following arguments: + + * `provisionerId` + +Required [output schema](v1/list-workertypes-response.json#) + +```python +# Sync calls +queue.listWorkerTypes(provisionerId) # -> result` +queue.listWorkerTypes(provisionerId='value') # -> result +# Async call +await asyncQueue.listWorkerTypes(provisionerId) # -> result +await asyncQueue.listWorkerTypes(provisionerId='value') # -> result +``` + +#### Get a worker-type +Get a worker-type from a provisioner. + + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + +Required [output schema](v1/workertype-response.json#) + +```python +# Sync calls +queue.getWorkerType(provisionerId, workerType) # -> result` +queue.getWorkerType(provisionerId='value', workerType='value') # -> result +# Async call +await asyncQueue.getWorkerType(provisionerId, workerType) # -> result +await asyncQueue.getWorkerType(provisionerId='value', workerType='value') # -> result +``` + +#### Update a worker-type +Declare a workerType, supplying some details about it. + +`declareWorkerType` allows updating one or more properties of a worker-type as long as the required scopes are +possessed. For example, a request to update the `gecko-b-1-w2008` worker-type within the `aws-provisioner-v1` +provisioner with a body `{description: 'This worker type is great'}` would require you to have the scope +`queue:declare-worker-type:aws-provisioner-v1/gecko-b-1-w2008#description`. 
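+
+As a hedged sketch of this per-property scope convention (reusing the purely
+illustrative `aws-provisioner-v1` / `gecko-b-1-w2008` / description values from the
+paragraph above, plus a placeholder `rootUrl` and placeholder credentials), such an
+update might be issued like this:
+
+```python
+import taskcluster
+
+# Placeholder connection options; the supplied credentials would need to cover
+# queue:declare-worker-type:aws-provisioner-v1/gecko-b-1-w2008#description
+queue = taskcluster.Queue({
+    'rootUrl': 'https://tc.example.com',
+    'credentials': {'clientId': 'my-client', 'accessToken': 'my-token'},
+})
+
+# Updating a single property only requires the matching "#description" scope.
+queue.declareWorkerType(
+    'aws-provisioner-v1',                           # provisionerId
+    'gecko-b-1-w2008',                              # workerType
+    {'description': 'This worker type is great'},   # payload: one property
+)
+```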
+ + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + +Required [input schema](v1/update-workertype-request.json#) + +Required [output schema](v1/workertype-response.json#) + +```python +# Sync calls +queue.declareWorkerType(provisionerId, workerType, payload) # -> result` +queue.declareWorkerType(payload, provisionerId='value', workerType='value') # -> result +# Async call +await asyncQueue.declareWorkerType(provisionerId, workerType, payload) # -> result +await asyncQueue.declareWorkerType(payload, provisionerId='value', workerType='value') # -> result +``` + +#### Get a list of all active workers of a workerType +Get a list of all active workers of a workerType. + +`listWorkers` allows a response to be filtered by quarantined and non quarantined workers. +To filter the query, you should call the end-point with `quarantined` as a query-string option with a +true or false value. + +The response is paged. If this end-point returns a `continuationToken`, you +should call the end-point again with the `continuationToken` as a query-string +option. By default this end-point will list up to 1000 workers in a single +page. You may limit this with the query-string parameter `limit`. + + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + +Required [output schema](v1/list-workers-response.json#) + +```python +# Sync calls +queue.listWorkers(provisionerId, workerType) # -> result` +queue.listWorkers(provisionerId='value', workerType='value') # -> result +# Async call +await asyncQueue.listWorkers(provisionerId, workerType) # -> result +await asyncQueue.listWorkers(provisionerId='value', workerType='value') # -> result +``` + +#### Get a worker-type +Get a worker from a worker-type. + + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + * `workerGroup` + * `workerId` + +Required [output schema](v1/worker-response.json#) + +```python +# Sync calls +queue.getWorker(provisionerId, workerType, workerGroup, workerId) # -> result` +queue.getWorker(provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result +# Async call +await asyncQueue.getWorker(provisionerId, workerType, workerGroup, workerId) # -> result +await asyncQueue.getWorker(provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result +``` + +#### Quarantine a worker +Quarantine a worker + + + +Takes the following arguments: + + * `provisionerId` + * `workerType` + * `workerGroup` + * `workerId` + +Required [input schema](v1/quarantine-worker-request.json#) + +Required [output schema](v1/worker-response.json#) + +```python +# Sync calls +queue.quarantineWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result` +queue.quarantineWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result +# Async call +await asyncQueue.quarantineWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result +await asyncQueue.quarantineWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result +``` + +#### Declare a worker +Declare a worker, supplying some details about it. + +`declareWorker` allows updating one or more properties of a worker as long as the required scopes are +possessed. 
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+ * `workerGroup`
+ * `workerId`
+
+Required [input schema](v1/update-worker-request.json#)
+
+Required [output schema](v1/worker-response.json#)
+
+```python
+# Sync calls
+queue.declareWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result`
+queue.declareWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+# Async call
+await asyncQueue.declareWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result
+await asyncQueue.declareWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+```
+
+
+
+
+### Exchanges in `taskcluster.QueueEvents`
+```python
+// Create QueueEvents client instance
+import taskcluster
+queueEvents = taskcluster.QueueEvents(options)
+```
+The queue, typically available at `queue.taskcluster.net`, is responsible
+for accepting tasks and tracking their state as they are executed by
+workers, in order to ensure they are eventually resolved.
+
+This document describes the AMQP exchanges offered by the queue, which allow
+third-party listeners to monitor tasks as they progress to resolution.
+These exchanges target the following audiences:
+ * Schedulers, which take action after tasks are completed,
+ * Workers, which want to listen for new or canceled tasks (optional),
+ * Tools, which want to update their view as tasks progress.
+
+You'll notice that all the exchanges in this document share the same
+routing key pattern. This makes it very easy to bind to all messages
+about a certain kind of task.
+
+**Task specific routes**, a task can define a task specific route using
+the `task.routes` property. See the task creation documentation for details
+on the permissions required to provide task specific routes. If a task has
+the entry `'notify.by-email'` as a task specific route defined in
+`task.routes`, all messages about this task will be CC'ed with the
+routing-key `'route.notify.by-email'`.
+
+These routes will always be prefixed `route.`, so they cannot interfere
+with the _primary_ routing key as documented here. Notice that the
+_primary_ routing key is always prefixed `primary.`. This is ensured
+in the routing key reference, so API clients will do this automatically.
+
+Please note that, because of the way RabbitMQ works, a message will only arrive
+in your queue once, even if you have bound to the exchange with
+multiple routing key patterns that match more than one of the CC'ed
+routing keys.
+
+**Delivery guarantees**, most operations on the queue are idempotent,
+which means that if repeated with the same arguments the requests
+will ensure completion of the operation and return the same response.
+This is useful if the server crashes or the TCP connection breaks, but
+when re-executing an idempotent operation, the queue will also resend
+any related AMQP messages. Hence, messages may be repeated.
+
+This shouldn't be much of a problem, as the best you can achieve using
+confirm messages with AMQP is at-least-once delivery semantics. Hence,
+this only prevents you from obtaining at-most-once delivery semantics.
+
+**Remark**, some messages generated by timeouts may be dropped if the
+server crashes at the wrong time. Ideally, we'll address this in the
+future. For now we suggest you ignore this corner case, and notify us
+if this corner case is of concern to you.
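+
+As a hedged, non-normative illustration of these conventions (placeholder `rootUrl`
+and task ID; the AMQP/Pulse consumer itself is out of scope here), the snippet below
+builds the primary binding for task-completed messages of a single task, plus a second
+binding for the CC'ed custom route described above:
+
+```python
+import taskcluster
+
+queueEvents = taskcluster.QueueEvents({'rootUrl': 'https://tc.example.com'})
+
+# Primary routing key: narrow task-completed messages down to one task.
+primary = queueEvents.taskCompleted({'taskId': 'fZ8cHBouTJyZkKYkaGfUfA'})
+print(primary['exchange'])           # exchange to bind to
+print(primary['routingKeyPattern'])  # roughly 'primary.fZ8cHBouTJyZkKYkaGfUfA.*.*.*.*.*.*.*.#'
+
+# Custom routes are CC'ed with a 'route.' prefix, so they need their own binding.
+custom = {'exchange': primary['exchange'],
+          'routingKeyPattern': 'route.notify.by-email'}
+
+# Both bindings would then be registered with whatever AMQP/Pulse consumer is in use.
+```
+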
+#### Task Defined Messages + * `queueEvents.taskDefined(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + +#### Task Pending Messages + * `queueEvents.taskPending(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` is required Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + +#### Task Running Messages + * `queueEvents.taskRunning(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` is required Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` is required Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` is required Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. 
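+
+For orientation only (illustrative values, placeholder `rootUrl`): because
+`provisionerId` and `workerType` are part of the routing key listed above, a listener
+can restrict running-task messages to a single worker pool by passing just those two
+fields and leaving everything else as wildcards:
+
+```python
+import taskcluster
+
+queueEvents = taskcluster.QueueEvents({'rootUrl': 'https://tc.example.com'})
+
+# All "task running" messages for one provisionerId/workerType pair.
+binding = queueEvents.taskRunning({
+    'provisionerId': 'aws-provisioner-v1',   # illustrative values, matching the
+    'workerType': 'gecko-b-1-w2008',         # declareWorkerType example earlier
+})
+# Expected to look roughly like:
+#   'primary.*.*.*.*.aws-provisioner-v1.gecko-b-1-w2008.*.*.#'
+print(binding['routingKeyPattern'])
+```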
+ +#### Artifact Creation Messages + * `queueEvents.artifactCreated(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` is required Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` is required Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` is required Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + +#### Task Completed Messages + * `queueEvents.taskCompleted(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` is required Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` is required Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` is required Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + +#### Task Failed Messages + * `queueEvents.taskFailed(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. 
As automatically done by our tooling, if not specified. + +#### Task Exception Messages + * `queueEvents.taskException(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskId` is required Description: `taskId` for the task this message concerns + * `runId` Description: `runId` of latest run for the task, `_` if no run is exists for the task. + * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run is exists for the task. + * `workerId` Description: `workerId` of latest run for the task, `_` if no run is exists for the task. + * `provisionerId` is required Description: `provisionerId` this task is targeted at. + * `workerType` is required Description: `workerType` this task must run on. + * `schedulerId` is required Description: `schedulerId` this task was created by. + * `taskGroupId` is required Description: `taskGroupId` this task was created in. + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + +#### Task Group Resolved Messages + * `queueEvents.taskGroupResolved(routingKeyPattern) -> routingKey` + * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. + * `taskGroupId` is required Description: `taskGroupId` for the task-group this message concerns + * `schedulerId` is required Description: `schedulerId` for the task-group this message concerns + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + + + + +### Methods in `taskcluster.Secrets` +```python +import asyncio # Only for async +// Create Secrets client instance +import taskcluster +import taskcluster.aio + +secrets = taskcluster.Secrets(options) +# Below only for async instances, assume already in coroutine +loop = asyncio.get_event_loop() +session = taskcluster.aio.createSession(loop=loop) +asyncSecrets = taskcluster.aio.Secrets(options, session=session) +``` +The secrets service provides a simple key/value store for small bits of secret +data. Access is limited by scopes, so values can be considered secret from +those who do not have the relevant scopes. + +Secrets also have an expiration date, and once a secret has expired it can no +longer be read. This is useful for short-term secrets such as a temporary +service credential or a one-time signing key. +#### Ping Server +Respond without doing anything. +This endpoint is used to check that the service is up. + + +```python +# Sync calls +secrets.ping() # -> None` +# Async call +await asyncSecrets.ping() # -> None +``` + +#### Set Secret +Set the secret associated with some key. If the secret already exists, it is +updated instead. + + + +Takes the following arguments: + + * `name` + +Required [input schema](v1/secret.json#) + +```python +# Sync calls +secrets.set(name, payload) # -> None` +secrets.set(payload, name='value') # -> None +# Async call +await asyncSecrets.set(name, payload) # -> None +await asyncSecrets.set(payload, name='value') # -> None +``` + +#### Delete Secret +Delete the secret associated with some key. 
+ + + +Takes the following arguments: + + * `name` + +```python +# Sync calls +secrets.remove(name) # -> None` +secrets.remove(name='value') # -> None +# Async call +await asyncSecrets.remove(name) # -> None +await asyncSecrets.remove(name='value') # -> None +``` + +#### Read Secret +Read the secret associated with some key. If the secret has recently +expired, the response code 410 is returned. If the caller lacks the +scope necessary to get the secret, the call will fail with a 403 code +regardless of whether the secret exists. + + + +Takes the following arguments: + + * `name` + +Required [output schema](v1/secret.json#) + +```python +# Sync calls +secrets.get(name) # -> result` +secrets.get(name='value') # -> result +# Async call +await asyncSecrets.get(name) # -> result +await asyncSecrets.get(name='value') # -> result +``` + +#### List Secrets +List the names of all secrets. + +By default this end-point will try to return up to 1000 secret names in one +request. But it **may return less**, even if more tasks are available. +It may also return a `continuationToken` even though there are no more +results. However, you can only be sure to have seen all results if you +keep calling `listTaskGroup` with the last `continuationToken` until you +get a result without a `continuationToken`. + +If you are not interested in listing all the members at once, you may +use the query-string option `limit` to return fewer. + + +Required [output schema](v1/secret-list.json#) + +```python +# Sync calls +secrets.list() # -> result` +# Async call +await asyncSecrets.list() # -> result +``` + + + + +### Exchanges in `taskcluster.TreeherderEvents` +```python +// Create TreeherderEvents client instance +import taskcluster +treeherderEvents = taskcluster.TreeherderEvents(options) +``` +The taskcluster-treeherder service is responsible for processing +task events published by TaskCluster Queue and producing job messages +that are consumable by Treeherder. + +This exchange provides that job messages to be consumed by any queue that +attached to the exchange. This could be a production Treeheder instance, +a local development environment, or a custom dashboard. +#### Job Messages + * `treeherderEvents.jobs(routingKeyPattern) -> routingKey` + * `destination` is required Description: destination + * `project` is required Description: project + * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified. + + + + diff --git a/third_party/python/taskcluster/setup.cfg b/third_party/python/taskcluster/setup.cfg new file mode 100644 index 000000000000..fcc4254b5993 --- /dev/null +++ b/third_party/python/taskcluster/setup.cfg @@ -0,0 +1,8 @@ +[nosetests] +verbosity = 1 +detailed-errors = 1 + +[egg_info] +tag_build = +tag_date = 0 + diff --git a/third_party/python/taskcluster/setup.py b/third_party/python/taskcluster/setup.py new file mode 100644 index 000000000000..c9c7ff4a22c7 --- /dev/null +++ b/third_party/python/taskcluster/setup.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +from setuptools import setup +from setuptools.command.test import test as TestCommand +import sys + +# The VERSION variable is automagically changed +# by release.sh. 
Make sure you understand how +# that script works if you want to change this +VERSION = '6.0.0' + +tests_require = [ + 'nose==1.3.7', + 'nose-exclude==0.5.0', + 'httmock==1.2.6', + 'rednose==1.2.1', + 'mock==1.0.1', + 'setuptools-lint==0.3', + 'flake8==2.5.0', + 'psutil==2.1.3', + 'hypothesis==3.6.1', + 'tox==2.3.2', + 'coverage==4.1b2', + 'python-dateutil==2.6.0', +] + +# requests has a policy of not breaking apis between major versions +# http://docs.python-requests.org/en/latest/community/release-process/ +install_requires = [ + 'requests>=2.4.3,<3', + 'mohawk>=0.3.4,<0.4', + 'slugid>=1.0.7,<2', + 'taskcluster-urls>=10.1.0,<12', + 'six>=1.10.0,<2', +] + +# from http://testrun.org/tox/latest/example/basic.html +class Tox(TestCommand): + user_options = [('tox-args=', 'a', "Arguments to pass to tox")] + + def initialize_options(self): + TestCommand.initialize_options(self) + self.tox_args = None + + def finalize_options(self): + TestCommand.finalize_options(self) + self.test_args = [] + self.test_suite = True + + def run_tests(self): + # import here, cause outside the eggs aren't loaded + import tox + import shlex + args = self.tox_args + if args: + args = shlex.split(self.tox_args) + errno = tox.cmdline(args=args) + sys.exit(errno) + +if sys.version_info.major == 2: + tests_require.extend([ + 'subprocess32==3.2.6', + ]) +elif sys.version_info[:2] < (3, 5): + raise Exception('This library does not support Python 3 versions below 3.5') +elif sys.version_info[:2] >= (3, 5): + install_requires.extend([ + 'aiohttp>=2.0.0,<4', + 'async_timeout>=2.0.0,<4', + ]) + +if __name__ == '__main__': + setup( + name='taskcluster', + version=VERSION, + description='Python client for Taskcluster', + author='John Ford', + author_email='jhford@mozilla.com', + url='https://github.com/taskcluster/taskcluster-client.py', + packages=['taskcluster', 'taskcluster.aio'], + install_requires=install_requires, + test_suite="nose.collector", + tests_require=tests_require, + cmdclass={'test': Tox}, + zip_safe=False, + classifiers=['Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6'], + ) diff --git a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/RECORD b/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/RECORD deleted file mode 100644 index 8f98c94f9a9b..000000000000 --- a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/RECORD +++ /dev/null @@ -1,50 +0,0 @@ -taskcluster/__init__.py,sha256=_y18MJ1uOCWGq8aj2a0H2B3aVweH2L3VqjmmPJ65_k0,557 -taskcluster/_client_importer.py,sha256=bOE1gEKP7vsp9nGXh4gL7Ghd8rXg0h8F65lgdsG-xbM,748 -taskcluster/auth.py,sha256=O2xl_cCkAIT-ZZIXwIS0c4Dy2lathKpnSsnbyMCBnOI,30165 -taskcluster/authevents.py,sha256=5iHNvGulIZtAKSCfUzT61SnuysxA1ZluigfLJA3412A,5959 -taskcluster/awsprovisioner.py,sha256=YbICq_50aeA1EE0fHR_zyhsBwQqSSsx6uIFV2QOBmIo,18346 -taskcluster/awsprovisionerevents.py,sha256=55kd4f4n4TTgq7fekx-Fa0UqLvnTtnUyXXWtydsAXVM,5186 -taskcluster/client.py,sha256=A1SnUo3cqRQKqLUtJIzWk6u-9s4YeLh_Ob2O44PVet0,27040 -taskcluster/ec2manager.py,sha256=NHdGIDYAv-4sml7bPKhirwpFaK4Z0CWUpDJS-ewHZng,14139 -taskcluster/exceptions.py,sha256=KAOkEXiIyVSOlnucQQzANdx-DHf966BILBVQYMtyyjA,1105 -taskcluster/github.py,sha256=21NdKuPnnK28Quh3ISBU2WhEqFKMlNAQWcEzpSoFMig,6273 -taskcluster/githubevents.py,sha256=MfbdeT91PmJAGoiYtiJoGMlJIEAes5EP9fYzRu7gO9s,7851 -taskcluster/hooks.py,sha256=OsSIE_LqnFZCBBZMfoC7c3VJ_3sb1GOjBdNwInvD-jI,10468 -taskcluster/index.py,sha256=uzdVlmT0iowa9MTkwSraa3ygElHJQLxiVYKzVDfIyPM,10444 
-taskcluster/login.py,sha256=MDy_f-t3KikPMiNlbtG93982XLAj42Go6_PewmWuLao,2936 -taskcluster/notify.py,sha256=X4vfoTflaNzM7eaY6nLNgTMJWtXMZozkgcqlrd5-LPM,3751 -taskcluster/pulse.py,sha256=iI1m2NFsKWjkqG1sCFIDSkkrBhCpvM3RMoI5PilGTf4,4408 -taskcluster/purgecache.py,sha256=Q7zlOadn7MCkM_eR85Ro7cJ4x8_xHBQlk_uM_IZEQbg,3921 -taskcluster/purgecacheevents.py,sha256=vvg-eqAy3lzxGsxRCcW6n8aUn6-Mp1O0ot6mpxU3vbg,2411 -taskcluster/queue.py,sha256=s94lccCmE0LN-bVZmZWryW1qLDImOzNLs4U-8R9Wj2g,47025 -taskcluster/queueevents.py,sha256=Umo41Hm_9Pht4Sb6jiiYeAAWxH8K723VDCjLdc8D77o,27581 -taskcluster/secrets.py,sha256=-WABYJ2Nb8G82s4u0B7dq9In7w40_fDG9t8lIOLRcog,4500 -taskcluster/treeherderevents.py,sha256=F6RR5RjcZGS2y10CvXuAlRllgBw_x7H6Pjp-P6LIiDk,2309 -taskcluster/utils.py,sha256=UOPZUtpn3lteBneg_0YLWc9oUnUov4toFV6mWvqcKCM,10861 -taskcluster/aio/__init__.py,sha256=14NuY4rMcfnt6_mfarZPnlz_vNesVHOS7HoxHihZ_IQ,468 -taskcluster/aio/_client_importer.py,sha256=bOE1gEKP7vsp9nGXh4gL7Ghd8rXg0h8F65lgdsG-xbM,748 -taskcluster/aio/asyncclient.py,sha256=NFtAzyKyYRFhKFzH3pIHh3UzFfosRgvHjrb39dlcSWY,14303 -taskcluster/aio/asyncutils.py,sha256=FX0tBb75gJnVMdlWtE-3Gp6N6p2AXW-rqghxV2PCGhw,4340 -taskcluster/aio/auth.py,sha256=Ri9nV2FH-M6yiMvFeFQKLz4M2P7Eo-MwEiVfMDXldPI,30548 -taskcluster/aio/authevents.py,sha256=xw_DsTBZjmISCQthXlhxlAL84KsJ0Q_g0mS5WwGrDlk,5994 -taskcluster/aio/awsprovisioner.py,sha256=7tyA0v9D9l3FlMMeWmPVNlENXKqvDRD8G_wlEc-sfbA,18561 -taskcluster/aio/awsprovisionerevents.py,sha256=9xG6V3-nncH4MSfbGjqg83LpaCEMNdP6zSJXbYOtKlo,5221 -taskcluster/aio/ec2manager.py,sha256=KOH98Dks2nf_Ud7yLJOsbtGbR8uzST_KVPAuQ9hwfPY,14438 -taskcluster/aio/github.py,sha256=FjtgQvpZghMQR8HHx2ML-wqTDBM1SZgZ6qLNVPbCGL8,6404 -taskcluster/aio/githubevents.py,sha256=HtHuCYF1iaoDqPpQFLzivIjEkgej_-OtQmbf6QhsoIg,7886 -taskcluster/aio/hooks.py,sha256=keH2xTqJQP8E-mmpzpx4c7N_X0qzsCM9YvGYPbQZozo,10647 -taskcluster/aio/index.py,sha256=OkfEcyD7bb9R7KmMRtRsFaF_NIztPlRLDrLFWoaJL0U,10551 -taskcluster/aio/login.py,sha256=4Nd9rPJVWSLk6bcd8qel-trWfMYFmaXa8UhS5kLI3co,2995 -taskcluster/aio/notify.py,sha256=7sDdBrMJvTIILn0teO3M2z1VIEwyf-ANt9WCuKT5d6c,3834 -taskcluster/aio/pulse.py,sha256=eRju7rWh2ZhD-UrYgF6GV3EcKNuayFrOjPjmMORJwk8,4491 -taskcluster/aio/purgecache.py,sha256=rgqJ51gNinkYRqDSQKyetazIT8r8c28UMuIHOmr52dU,4004 -taskcluster/aio/purgecacheevents.py,sha256=WU_PsvO0OhT3uSq-RrHXl3mjuDO1Nj_iXTh7-P4HPOE,2446 -taskcluster/aio/queue.py,sha256=vt_48tA1RlZueyEGHHYRDlVj-tmYPzrhYn-H1W-MuZ8,47456 -taskcluster/aio/queueevents.py,sha256=oP1Qh3H3iBha2QU0hxwQjECZ4rjr9YbjaRgLAnKtpSQ,27616 -taskcluster/aio/secrets.py,sha256=Sisg9QYFWB1YKctKSAMjOeFU7T3FVOrkYumcqcnDZxU,4595 -taskcluster/aio/treeherderevents.py,sha256=m9Hh0nf2tDpi4Hlra-0ESRR-QOUuFv-9GfgLoez00Kg,2344 -taskcluster-6.0.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725 -taskcluster-6.0.0.dist-info/METADATA,sha256=mxOSBQqHo1B4uxzCVBQTpOlbqnTlQ4Iiu0OMlq07ugs,672 -taskcluster-6.0.0.dist-info/WHEEL,sha256=_NOXIqFgOaYmlm9RJLPQZ13BJuEIrp5jx5ptRD5uh3Y,92 -taskcluster-6.0.0.dist-info/top_level.txt,sha256=Uxnnep-l0fTSnwOst3XkLMA-KHfY5ONwwtSgRmcErXU,12 -taskcluster-6.0.0.dist-info/RECORD,, diff --git a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/top_level.txt b/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/top_level.txt deleted file mode 100644 index cb1e1bb482a2..000000000000 --- a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -taskcluster diff --git a/third_party/python/taskcluster/test/test_async.py 
b/third_party/python/taskcluster/test/test_async.py new file mode 100644 index 000000000000..8a06f422b596 --- /dev/null +++ b/third_party/python/taskcluster/test/test_async.py @@ -0,0 +1,63 @@ +from __future__ import division, print_function, absolute_import +import unittest +import datetime +import os + +import asyncio + +import base +import taskcluster.aio.auth as subjectAsync + + +@unittest.skipIf(os.environ.get('NO_TESTS_OVER_WIRE'), "Skipping tests over wire") +class TestAuthenticationAsync(base.TCTest): + + def test_async_works_with_permanent_credentials(self): + """we can call methods which require authentication with valid + permacreds""" + + loop = asyncio.get_event_loop() + + async def x(): + async with subjectAsync.createSession(loop=loop) as session: + client = subjectAsync.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'no-secret', + }, + }, session=session) + result = await client.testAuthenticate({ + 'clientScopes': ['test:a'], + 'requiredScopes': ['test:a'], + }) + self.assertEqual(result, {'scopes': ['test:a'], 'clientId': 'tester'}) + + loop.run_until_complete(x()) + + def test_async_works_with_temporary_credentials(self): + """we can call methods which require authentication with temporary + credentials generated by python client""" + loop = asyncio.get_event_loop() + + async def x(): + async with subjectAsync.createSession(loop=loop) as session: + tempCred = subjectAsync.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:xyz'], + ) + client = subjectAsync.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + }, session=session) + + result = await client.testAuthenticate({ + 'clientScopes': ['test:*'], + 'requiredScopes': ['test:xyz'], + }) + self.assertEqual(result, {'scopes': ['test:xyz'], 'clientId': 'tester'}) + + loop.run_until_complete(x()) diff --git a/third_party/python/taskcluster/test/test_client.py b/third_party/python/taskcluster/test/test_client.py new file mode 100644 index 000000000000..afe4b254bca3 --- /dev/null +++ b/third_party/python/taskcluster/test/test_client.py @@ -0,0 +1,955 @@ +from __future__ import division, print_function +import types +import unittest +import time +import datetime +from six.moves import urllib +import os +import re +import json +import copy + +import mock +import httmock +import requests + +import base +import taskcluster.auth as subject +import taskcluster.exceptions as exc +import taskcluster.utils as utils +import taskcluster_urls as liburls + + +class ClientTest(base.TCTest): + + realTimeSleep = time.sleep + + def setUp(self): + subject.config['credentials'] = { + 'clientId': 'clientId', + 'accessToken': 'accessToken', + } + keys = [ + base.createTopicExchangeKey('primary_key', constant='primary'), + base.createTopicExchangeKey('norm1'), + base.createTopicExchangeKey('norm2'), + base.createTopicExchangeKey('norm3'), + base.createTopicExchangeKey('multi_key', multipleWords=True), + ] + topicEntry = base.createApiEntryTopicExchange('topicName', 'topicExchange', routingKey=keys) + entries = [ + base.createApiEntryFunction('no_args_no_input', 0, False), + base.createApiEntryFunction('two_args_no_input', 2, False), + base.createApiEntryFunction('no_args_with_input', 0, True), + base.createApiEntryFunction('two_args_with_input', 2, True), + base.createApiEntryFunction('NEVER_CALL_ME', 0, False), + topicEntry + ] + self.apiRef = 
base.createApiRef(entries=entries) + self.clientClass = subject.createApiClient('testApi', self.apiRef) + self.client = self.clientClass({'rootUrl': self.test_root_url}) + # Patch time.sleep so that we don't delay tests + sleepPatcher = mock.patch('time.sleep') + sleepSleep = sleepPatcher.start() + sleepSleep.return_value = None + self.addCleanup(sleepSleep.stop) + + def tearDown(self): + time.sleep = self.realTimeSleep + + +class TestConstructorOptions(ClientTest): + + def test_baseUrl_not_allowed(self): + with self.assertRaises(exc.TaskclusterFailure): + self.clientClass({'baseUrl': 'https://bogus.net'}) + + def test_rootUrl_set_correctly(self): + client = self.clientClass({'rootUrl': self.test_root_url}) + self.assertEqual(client.options['rootUrl'], self.test_root_url) + + def test_apiVersion_set_correctly(self): + client = self.clientClass({'rootUrl': self.test_root_url}) + self.assertEqual(client.apiVersion, 'v1') + + def test_apiVersion_set_correctly_default(self): + apiRef = copy.deepcopy(self.apiRef) + del apiRef['reference']['apiVersion'] + clientClass = subject.createApiClient('testApi', apiRef) + client = clientClass({'rootUrl': self.test_root_url}) + self.assertEqual(client.apiVersion, 'v1') + + def test_serviceName_set_correctly(self): + client = self.clientClass({'rootUrl': self.test_root_url}) + self.assertEqual(client.serviceName, 'fake') + + +class TestSubArgsInRoute(ClientTest): + + def test_valid_no_subs(self): + provided = {'route': '/no/args/here', 'name': 'test'} + expected = 'no/args/here' + result = self.client._subArgsInRoute(provided, {}) + self.assertEqual(expected, result) + + def test_valid_one_sub(self): + provided = {'route': '/one//here', 'name': 'test'} + expected = 'one/value/here' + arguments = {'argToSub': 'value'} + result = self.client._subArgsInRoute(provided, arguments) + self.assertEqual(expected, result) + + def test_invalid_one_sub(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._subArgsInRoute({ + 'route': '/one//here', + 'name': 'test' + }, {'unused': 'value'}) + + def test_invalid_route_no_sub(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._subArgsInRoute({ + 'route': 'askldjflkasdf', + 'name': 'test' + }, {'should': 'fail'}) + + def test_invalid_route_no_arg(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._subArgsInRoute({ + 'route': 'askldjflkasdf', + 'name': 'test' + }, {'should': 'fail'}) + + +class TestProcessArgs(ClientTest): + + def test_no_args(self): + self.assertEqual(({}, None, {}, None, None), self.client._processArgs({'args': [], 'name': 'test'})) + + def test_finds_payload(self): + expected = ({}, {'a': 123}, {}, None, None) + actual = self.client._processArgs({'args': [], 'name': 'test', 'input': True}, {'a': 123}) + self.assertEqual(expected, actual) + + def test_positional_args_only(self): + expected = {'test': 'works', 'test2': 'still works'} + entry = {'args': ['test', 'test2'], 'name': 'test'} + actual = self.client._processArgs(entry, 'works', 'still works') + self.assertEqual((expected, None, {}, None, None), actual) + + def test_keyword_args_only(self): + expected = {'test': 'works', 'test2': 'still works'} + entry = {'args': ['test', 'test2'], 'name': 'test'} + actual = self.client._processArgs(entry, test2='still works', test='works') + self.assertEqual((expected, None, {}, None, None), actual) + + def test_int_args(self): + expected = {'test': 'works', 'test2': 42} + entry = {'args': ['test', 'test2'], 'name': 'test'} + actual = 
self.client._processArgs(entry, 'works', 42) + self.assertEqual((expected, None, {}, None, None), actual) + + def test_keyword_and_positional(self): + entry = {'args': ['test'], 'name': 'test'} + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs(entry, ['broken'], test='works') + + def test_invalid_not_enough_args(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({'args': ['test'], 'name': 'test'}) + + def test_invalid_too_many_positional_args(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({'args': ['test'], 'name': 'test'}, 'enough', 'one too many') + + def test_invalid_too_many_keyword_args(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({ + 'args': ['test'], + 'name': 'test' + }, test='enough', test2='one too many') + + def test_invalid_missing_arg_positional(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({'args': ['test', 'test2'], 'name': 'test'}, 'enough') + + def test_invalid_not_enough_args_because_of_overwriting(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({ + 'args': ['test', 'test2'], + 'name': 'test' + }, 'enough', test='enough') + + def test_invalid_positional_not_string_empty_dict(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({'args': ['test'], 'name': 'test'}, {}) + + def test_invalid_positional_not_string_non_empty_dict(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client._processArgs({'args': ['test'], 'name': 'test'}, {'john': 'ford'}) + + def test_calling_convention_1_without_payload(self): + params, payload, query, _, _ = self.client._processArgs({'args': ['k1', 'k2'], 'name': 'test'}, 1, 2) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, None) + self.assertEqual(query, {}) + + def test_calling_convention_1_with_payload(self): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test', 'input': True}, + 1, + 2, + {'A': 123} + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, {'A': 123}) + self.assertEqual(query, {}) + + def test_calling_convention_2_without_payload(self): + params, payload, query, _, _ = self.client._processArgs({'args': ['k1', 'k2'], 'name': 'test'}, k1=1, k2=2) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, None) + self.assertEqual(query, {}) + + def test_calling_convention_2_with_payload(self): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test', 'input': True}, + {'A': 123}, k1=1, k2=2 + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, {'A': 123}) + self.assertEqual(query, {}) + + def test_calling_convention_3_without_payload_without_query(self): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test'}, + params={'k1': 1, 'k2': 2} + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, None) + self.assertEqual(query, {}) + + def test_calling_convention_3_with_payload_without_query(self): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test'}, + params={'k1': 1, 'k2': 2}, + payload={'A': 123} + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, {'A': 123}) + self.assertEqual(query, {}) + + def test_calling_convention_3_with_payload_with_query(self): + params, 
payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test'}, + params={'k1': 1, 'k2': 2}, + payload={'A': 123}, + query={'B': 456} + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, {'A': 123}) + self.assertEqual(query, {'B': 456}) + + def test_calling_convention_3_without_payload_with_query(self): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test'}, + params={'k1': 1, 'k2': 2}, + query={'B': 456} + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, None) + self.assertEqual(query, {'B': 456}) + + def test_calling_convention_3_with_positional_arguments_with_payload_with_query(self): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test'}, + 1, + 2, + query={'B': 456}, + payload={'A': 123} + ) + self.assertEqual(params, {'k1': 1, 'k2': 2}) + self.assertEqual(payload, {'A': 123}) + self.assertEqual(query, {'B': 456}) + + def test_calling_convention_3_with_pagination(self): + def a(x): + return x + + _, _, _, ph, _ = self.client._processArgs({ + 'args': ['k1', 'k2'], + 'name': 'test', + 'query': ['continuationToken', 'limit'], + }, 1, 2, paginationHandler=a) + self.assertIs(ph, a) + + def test_calling_convention_3_with_pos_args_same_as_param_kwarg_dict_vals_with_payload_with_query(self): + with self.assertRaises(exc.TaskclusterFailure): + params, payload, query, _, _ = self.client._processArgs( + {'args': ['k1', 'k2'], 'name': 'test'}, + 1, + 2, + params={'k1': 1, 'k2': 2}, + query={'B': 456}, + payload={'A': 123} + ) + + +# This could probably be done better with Mock +class ObjWithDotJson(object): + + def __init__(self, status_code, x): + self.status_code = status_code + self.x = x + + def json(self): + return self.x + + def raise_for_status(self): + if self.status_code >= 300 or self.status_code < 200: + raise requests.exceptions.HTTPError() + + +class TestMakeHttpRequest(ClientTest): + + apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1', 'test') + + def setUp(self): + + ClientTest.setUp(self) + + def test_success_first_try(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + expected = {'test': 'works'} + p.return_value = ObjWithDotJson(200, expected) + + v = self.client._makeHttpRequest('GET', 'test', None) + p.assert_called_once_with('GET', self.apiPath, None, mock.ANY) + self.assertEqual(expected, v) + + def test_success_first_try_payload(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + expected = {'test': 'works'} + p.return_value = ObjWithDotJson(200, expected) + + v = self.client._makeHttpRequest('GET', 'test', {'payload': 2}) + p.assert_called_once_with('GET', self.apiPath, + utils.dumpJson({'payload': 2}), mock.ANY) + self.assertEqual(expected, v) + + def test_success_fifth_try_status_code(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + expected = {'test': 'works'} + sideEffect = [ + ObjWithDotJson(500, None), + ObjWithDotJson(500, None), + ObjWithDotJson(500, None), + ObjWithDotJson(500, None), + ObjWithDotJson(200, expected) + ] + p.side_effect = sideEffect + expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY) + for x in range(self.client.options['maxRetries'])] + + v = self.client._makeHttpRequest('GET', 'test', None) + p.assert_has_calls(expectedCalls) + self.assertEqual(expected, v) + + def test_exhaust_retries_try_status_code(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + msg = {'message': 
'msg', 'test': 'works'} + sideEffect = [ + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), # exhaust retries + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(500, msg), + ObjWithDotJson(200, {'got this': 'wrong'}) + ] + p.side_effect = sideEffect + expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY) + for x in range(self.client.options['maxRetries'] + 1)] + + with self.assertRaises(exc.TaskclusterRestFailure): + try: + self.client._makeHttpRequest('GET', 'test', None) + except exc.TaskclusterRestFailure as err: + self.assertEqual('msg', str(err)) + self.assertEqual(500, err.status_code) + self.assertEqual(msg, err.body) + raise err + p.assert_has_calls(expectedCalls) + + def test_success_fifth_try_connection_errors(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + expected = {'test': 'works'} + sideEffect = [ + requests.exceptions.RequestException, + requests.exceptions.RequestException, + requests.exceptions.RequestException, + requests.exceptions.RequestException, + ObjWithDotJson(200, expected) + ] + p.side_effect = sideEffect + expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY) + for x in range(self.client.options['maxRetries'])] + + v = self.client._makeHttpRequest('GET', 'test', None) + p.assert_has_calls(expectedCalls) + self.assertEqual(expected, v) + + def test_failure_status_code(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + p.return_value = ObjWithDotJson(500, None) + expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY) + for x in range(self.client.options['maxRetries'])] + with self.assertRaises(exc.TaskclusterRestFailure): + self.client._makeHttpRequest('GET', 'test', None) + p.assert_has_calls(expectedCalls) + + def test_failure_connection_errors(self): + with mock.patch.object(utils, 'makeSingleHttpRequest') as p: + p.side_effect = requests.exceptions.RequestException + expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY) + for x in range(self.client.options['maxRetries'])] + with self.assertRaises(exc.TaskclusterConnectionError): + self.client._makeHttpRequest('GET', 'test', None) + p.assert_has_calls(expectedCalls) + + +class TestOptions(ClientTest): + + def test_change_default_doesnt_change_previous_instances(self): + prevMaxRetries = subject._defaultConfig['maxRetries'] + with mock.patch.dict(subject._defaultConfig, {'maxRetries': prevMaxRetries + 1}): + self.assertEqual(self.client.options['maxRetries'], prevMaxRetries) + + def test_credentials_which_cannot_be_encoded_in_unicode_work(self): + badCredentials = { + 'accessToken': u"\U0001F4A9", + 'clientId': u"\U0001F4A9", + } + with self.assertRaises(exc.TaskclusterAuthFailure): + subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': badCredentials, + }) + + +class TestMakeApiCall(ClientTest): + """ This class covers both the _makeApiCall function logic as well as the + logic involved in setting up the api member functions since these are very + related things""" + + def setUp(self): + ClientTest.setUp(self) + patcher = mock.patch.object(self.client, 'NEVER_CALL_ME') + never_call = patcher.start() + never_call.side_effect = AssertionError + self.addCleanup(never_call.stop) + + def test_creates_methods(self): + self.assertIsInstance(self.client.no_args_no_input, types.MethodType) + + def test_methods_setup_correctly(self): 
+ # Because of how scoping works, I've had trouble where the last API Entry + # dict is used for all entires, which is wrong. This is to make sure that + # the scoping stuff isn't broken + self.assertIsNot(self.client.NEVER_CALL_ME, self.client.no_args_no_input) + + def test_hits_no_args_no_input(self): + expected = 'works' + with mock.patch.object(self.client, '_makeHttpRequest') as patcher: + patcher.return_value = expected + + actual = self.client.no_args_no_input() + self.assertEqual(expected, actual) + + patcher.assert_called_once_with('get', 'no_args_no_input', None) + + def test_hits_two_args_no_input(self): + expected = 'works' + with mock.patch.object(self.client, '_makeHttpRequest') as patcher: + patcher.return_value = expected + + actual = self.client.two_args_no_input('argone', 'argtwo') + self.assertEqual(expected, actual) + + patcher.assert_called_once_with('get', 'two_args_no_input/argone/argtwo', None) + + def test_hits_no_args_with_input(self): + expected = 'works' + with mock.patch.object(self.client, '_makeHttpRequest') as patcher: + patcher.return_value = expected + + actual = self.client.no_args_with_input({}) + self.assertEqual(expected, actual) + + patcher.assert_called_once_with('get', 'no_args_with_input', {}) + + def test_hits_two_args_with_input(self): + expected = 'works' + with mock.patch.object(self.client, '_makeHttpRequest') as patcher: + patcher.return_value = expected + + actual = self.client.two_args_with_input('argone', 'argtwo', {}) + self.assertEqual(expected, actual) + + patcher.assert_called_once_with('get', 'two_args_with_input/argone/argtwo', {}) + + def test_input_is_procesed(self): + expected = 'works' + expected_input = {'test': 'does work'} + with mock.patch.object(self.client, '_makeHttpRequest') as patcher: + patcher.return_value = expected + + actual = self.client.no_args_with_input(expected_input) + self.assertEqual(expected, actual) + + patcher.assert_called_once_with('get', 'no_args_with_input', expected_input) + + def test_kwargs(self): + expected = 'works' + with mock.patch.object(self.client, '_makeHttpRequest') as patcher: + patcher.return_value = expected + + actual = self.client.two_args_with_input({}, arg0='argone', arg1='argtwo') + self.assertEqual(expected, actual) + + patcher.assert_called_once_with('get', 'two_args_with_input/argone/argtwo', {}) + + def test_mixing_kw_and_positional_fails(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client.two_args_no_input('arg1', arg2='arg2') + + def test_missing_input_raises(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client.no_args_with_input() + + +# TODO: I should run the same things through the node client and compare the output +class TestTopicExchange(ClientTest): + + def test_string_pass_through(self): + expected = 'johnwrotethis' + actual = self.client.topicName(expected) + self.assertEqual(expected, actual['routingKeyPattern']) + + def test_exchange(self): + expected = 'exchange/taskcluster-fake/v1/topicExchange' + actual = self.client.topicName('') + self.assertEqual(expected, actual['exchange']) + + def test_exchange_trailing_slash(self): + self.client.options['exchangePrefix'] = 'exchange/taskcluster-fake2/v1/' + expected = 'exchange/taskcluster-fake2/v1/topicExchange' + actual = self.client.topicName('') + self.assertEqual(expected, actual['exchange']) + + def test_constant(self): + expected = 'primary.*.*.*.#' + actual = self.client.topicName({}) + self.assertEqual(expected, actual['routingKeyPattern']) + + def test_does_insertion(self): + 
expected = 'primary.*.value2.*.#' + actual = self.client.topicName({'norm2': 'value2'}) + self.assertEqual(expected, actual['routingKeyPattern']) + + def test_too_many_star_args(self): + with self.assertRaises(exc.TaskclusterTopicExchangeFailure): + self.client.topicName({'taskId': '123'}, 'another') + + def test_both_args_and_kwargs(self): + with self.assertRaises(exc.TaskclusterTopicExchangeFailure): + self.client.topicName({'taskId': '123'}, taskId='123') + + def test_no_args_no_kwargs(self): + expected = 'primary.*.*.*.#' + actual = self.client.topicName() + self.assertEqual(expected, actual['routingKeyPattern']) + actual = self.client.topicName({}) + self.assertEqual(expected, actual['routingKeyPattern']) + + +class TestBuildUrl(ClientTest): + + apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1', 'two_args_no_input/arg0/arg1') + + def test_build_url_positional(self): + actual = self.client.buildUrl('two_args_no_input', 'arg0', 'arg1') + self.assertEqual(self.apiPath, actual) + + def test_build_url_keyword(self): + actual = self.client.buildUrl('two_args_no_input', arg0='arg0', arg1='arg1') + self.assertEqual(self.apiPath, actual) + + def test_build_url_query_string(self): + actual = self.client.buildUrl( + 'two_args_no_input', + params={ + 'arg0': 'arg0', + 'arg1': 'arg1' + }, + query={'qs0': 1} + ) + self.assertEqual(self.apiPath + '?qs0=1', actual) + + def test_fails_to_build_url_for_missing_method(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client.buildUrl('non-existing') + + def test_fails_to_build_not_enough_args(self): + with self.assertRaises(exc.TaskclusterFailure): + self.client.buildUrl('two_args_no_input', 'not-enough-args') + + +class TestBuildSignedUrl(ClientTest): + + apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1', 'two_args_no_input/arg0/arg1') + + def test_builds_surl_positional(self): + actual = self.client.buildSignedUrl('two_args_no_input', 'arg0', 'arg1') + actual = re.sub('bewit=[^&]*', 'bewit=X', actual) + self.assertEqual(self.apiPath + '?bewit=X', actual) + + def test_builds_surl_keyword(self): + actual = self.client.buildSignedUrl('two_args_no_input', arg0='arg0', arg1='arg1') + actual = re.sub('bewit=[^&]*', 'bewit=X', actual) + self.assertEqual(self.apiPath + '?bewit=X', actual) + + +class TestMockHttpCalls(ClientTest): + + """Test entire calls down to the requests layer, ensuring they have + well-formed URLs and handle request and response bodies properly. 
This + verifies that we can call real methods with both position and keyword + args""" + + def setUp(self): + ClientTest.setUp(self) + self.fakeResponse = '' + + def fakeSite(url, request): + self.gotUrl = urllib.parse.urlunsplit(url) + self.gotRequest = request + return self.fakeResponse + self.fakeSite = fakeSite + + def test_no_args_no_input(self): + with httmock.HTTMock(self.fakeSite): + self.client.no_args_no_input() + self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/no_args_no_input') + + def test_two_args_no_input(self): + with httmock.HTTMock(self.fakeSite): + self.client.two_args_no_input('1', '2') + self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/two_args_no_input/1/2') + + def test_no_args_with_input(self): + with httmock.HTTMock(self.fakeSite): + self.client.no_args_with_input({'x': 1}) + self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/no_args_with_input') + self.assertEqual(json.loads(self.gotRequest.body), {"x": 1}) + + def test_no_args_with_empty_input(self): + with httmock.HTTMock(self.fakeSite): + self.client.no_args_with_input({}) + self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/no_args_with_input') + self.assertEqual(json.loads(self.gotRequest.body), {}) + + def test_two_args_with_input(self): + with httmock.HTTMock(self.fakeSite): + self.client.two_args_with_input('a', 'b', {'x': 1}) + self.assertEqual(self.gotUrl, + 'https://tc-tests.example.com/api/fake/v1/two_args_with_input/a/b') + self.assertEqual(json.loads(self.gotRequest.body), {"x": 1}) + + def test_kwargs(self): + with httmock.HTTMock(self.fakeSite): + self.client.two_args_with_input( + {'x': 1}, arg0='a', arg1='b') + self.assertEqual(self.gotUrl, + 'https://tc-tests.example.com/api/fake/v1/two_args_with_input/a/b') + self.assertEqual(json.loads(self.gotRequest.body), {"x": 1}) + + +@unittest.skipIf(os.environ.get('NO_TESTS_OVER_WIRE'), "Skipping tests over wire") +class TestAuthentication(base.TCTest): + + def test_no_creds_needed(self): + """we can call methods which require no scopes with an unauthenticated + client""" + # mock this request so we don't depend on the existence of a client + @httmock.all_requests + def auth_response(url, request): + self.assertEqual(urllib.parse.urlunsplit(url), + 'https://tc-tests.example.com/api/auth/v1/clients/abc') + self.failIf('Authorization' in request.headers) + headers = {'content-type': 'application/json'} + content = {"clientId": "abc"} + return httmock.response(200, content, headers, None, 5, request) + + with httmock.HTTMock(auth_response): + client = subject.Auth({"rootUrl": "https://tc-tests.example.com", "credentials": {}}) + result = client.client('abc') + self.assertEqual(result, {"clientId": "abc"}) + + def test_permacred_simple(self): + """we can call methods which require authentication with valid + permacreds""" + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'no-secret', + } + }) + result = client.testAuthenticate({ + 'clientScopes': ['test:a'], + 'requiredScopes': ['test:a'], + }) + self.assertEqual(result, {'scopes': ['test:a'], 'clientId': 'tester'}) + + def test_permacred_simple_authorizedScopes(self): + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'no-secret', + }, + 'authorizedScopes': ['test:a', 'test:b'], + }) + result = client.testAuthenticate({ + 'clientScopes': ['test:*'], + 'requiredScopes': ['test:a'], + 
}) + self.assertEqual(result, {'scopes': ['test:a', 'test:b'], + 'clientId': 'tester'}) + + def test_unicode_permacred_simple(self): + """Unicode strings that encode to ASCII in credentials do not cause issues""" + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': u'tester', + 'accessToken': u'no-secret', + } + }) + result = client.testAuthenticate({ + 'clientScopes': ['test:a'], + 'requiredScopes': ['test:a'], + }) + self.assertEqual(result, {'scopes': ['test:a'], 'clientId': 'tester'}) + + def test_invalid_unicode_permacred_simple(self): + """Unicode strings that do not encode to ASCII in credentials cause issues""" + with self.assertRaises(exc.TaskclusterAuthFailure): + subject.Auth({ + 'rootUrl': self.test_root_url, + 'credentials': { + 'clientId': u"\U0001F4A9", + 'accessToken': u"\U0001F4A9", + } + }) + + def test_permacred_insufficient_scopes(self): + """A call with insufficient scopes results in an error""" + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'no-secret', + } + }) + # TODO: this should be TaskclsuterAuthFailure; most likely the client + # is expecting AuthorizationFailure instead of AuthenticationFailure + with self.assertRaises(exc.TaskclusterRestFailure): + client.testAuthenticate({ + 'clientScopes': ['test:*'], + 'requiredScopes': ['something-more'], + }) + + def test_temporary_credentials(self): + """we can call methods which require authentication with temporary + credentials generated by python client""" + tempCred = subject.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:xyz'], + ) + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + }) + + result = client.testAuthenticate({ + 'clientScopes': ['test:*'], + 'requiredScopes': ['test:xyz'], + }) + self.assertEqual(result, {'scopes': ['test:xyz'], 'clientId': 'tester'}) + + def test_named_temporary_credentials(self): + tempCred = subject.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:xyz'], + name='credName' + ) + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + }) + + result = client.testAuthenticate({ + 'clientScopes': ['test:*', 'auth:create-client:credName'], + 'requiredScopes': ['test:xyz'], + }) + self.assertEqual(result, {'scopes': ['test:xyz'], 'clientId': 'credName'}) + + def test_temporary_credentials_authorizedScopes(self): + tempCred = subject.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:xyz:*'], + ) + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + 'authorizedScopes': ['test:xyz:abc'], + }) + + result = client.testAuthenticate({ + 'clientScopes': ['test:*'], + 'requiredScopes': ['test:xyz:abc'], + }) + self.assertEqual(result, {'scopes': ['test:xyz:abc'], + 'clientId': 'tester'}) + + def test_named_temporary_credentials_authorizedScopes(self): + tempCred = subject.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:xyz:*'], + name='credName' + ) + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + 'authorizedScopes': ['test:xyz:abc'], + 
}) + + result = client.testAuthenticate({ + 'clientScopes': ['test:*', 'auth:create-client:credName'], + 'requiredScopes': ['test:xyz:abc'], + }) + self.assertEqual(result, {'scopes': ['test:xyz:abc'], + 'clientId': 'credName'}) + + def test_signed_url(self): + """we can use a signed url built with the python client""" + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'no-secret', + } + }) + signedUrl = client.buildSignedUrl('testAuthenticateGet') + response = requests.get(signedUrl) + response.raise_for_status() + response = response.json() + response['scopes'].sort() + self.assertEqual(response, { + 'scopes': sorted(['test:*', u'auth:create-client:test:*']), + 'clientId': 'tester', + }) + + def test_signed_url_bad_credentials(self): + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'wrong-secret', + } + }) + signedUrl = client.buildSignedUrl('testAuthenticateGet') + response = requests.get(signedUrl) + with self.assertRaises(requests.exceptions.RequestException): + response.raise_for_status() + self.assertEqual(401, response.status_code) + + def test_temp_credentials_signed_url(self): + tempCred = subject.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:*'], + ) + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + }) + signedUrl = client.buildSignedUrl('testAuthenticateGet') + response = requests.get(signedUrl) + response.raise_for_status() + response = response.json() + self.assertEqual(response, { + 'scopes': ['test:*'], + 'clientId': 'tester', + }) + + def test_signed_url_authorizedScopes(self): + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': { + 'clientId': 'tester', + 'accessToken': 'no-secret', + }, + 'authorizedScopes': ['test:authenticate-get'], + }) + signedUrl = client.buildSignedUrl('testAuthenticateGet') + response = requests.get(signedUrl) + response.raise_for_status() + response = response.json() + self.assertEqual(response, { + 'scopes': ['test:authenticate-get'], + 'clientId': 'tester', + }) + + def test_temp_credentials_signed_url_authorizedScopes(self): + tempCred = subject.createTemporaryCredentials( + 'tester', + 'no-secret', + datetime.datetime.utcnow(), + datetime.datetime.utcnow() + datetime.timedelta(hours=1), + ['test:*'], + ) + client = subject.Auth({ + 'rootUrl': self.real_root_url, + 'credentials': tempCred, + 'authorizedScopes': ['test:authenticate-get'], + }) + signedUrl = client.buildSignedUrl('testAuthenticateGet') + response = requests.get(signedUrl) + response.raise_for_status() + response = response.json() + self.assertEqual(response, { + 'scopes': ['test:authenticate-get'], + 'clientId': 'tester', + }) diff --git a/third_party/python/taskcluster/test/test_utils.py b/third_party/python/taskcluster/test/test_utils.py new file mode 100644 index 000000000000..31328199d87e --- /dev/null +++ b/third_party/python/taskcluster/test/test_utils.py @@ -0,0 +1,439 @@ +import datetime +import uuid +import os + +import taskcluster.utils as subject +import dateutil.parser +import httmock +import mock +import requests + +import base +from unittest import TestCase +from hypothesis import given +import hypothesis.strategies as st + + +# https://docs.python.org/2/library/datetime.html#tzinfo-objects +class UTC(datetime.tzinfo): + """UTC""" + + def utcoffset(self, dt): + 
return datetime.timedelta(0) + + def tzname(self, dt): + return 'UTC' + + def dst(self, dt): + return datetime.timedelta(0) + + +utc = UTC() + + +class StringDateTests(base.TCTest): + def test_naive(self): + dateObj = datetime.datetime( + year=2000, + month=1, + day=1, + hour=1, + minute=1, + second=1 + ) + expected = '2000-01-01T01:01:01Z' + actual = subject.stringDate(dateObj) + self.assertEqual(expected, actual) + + def test_aware(self): + dateObj = datetime.datetime( + year=2000, + month=1, + day=1, + hour=1, + minute=1, + second=1, + tzinfo=utc + ) + expected = '2000-01-01T01:01:01Z' + actual = subject.stringDate(dateObj) + self.assertEqual(expected, actual) + + +class DumpJsonTests(base.TCTest): + def test_has_no_spaces(self): + expected = [ + '{"test":"works","doesit":"yes"}', + '{"doesit":"yes","test":"works"}' + ] + actual = subject.dumpJson({'test': 'works', 'doesit': 'yes'}) + self.assertTrue(actual in expected) + + def test_serializes_naive_date(self): + dateObj = datetime.datetime( + year=2000, + month=1, + day=1, + hour=1, + minute=1, + second=1 + ) + expected = '{"date":"2000-01-01T01:01:01Z"}' + actual = subject.dumpJson({'date': dateObj}) + self.assertEqual(expected, actual) + + def test_serializes_aware_date(self): + dateObj = datetime.datetime( + year=2000, + month=1, + day=1, + hour=1, + minute=1, + second=1, + tzinfo=utc + ) + expected = '{"date":"2000-01-01T01:01:01Z"}' + actual = subject.dumpJson({'date': dateObj}) + self.assertEqual(expected, actual) + + +class TestBase64Utils(base.TCTest): + def test_encode_string_for_b64_header(self): + # Really long strings trigger newlines every 72 ch + expected = 'YWJjZGVm' * 500 + expected = expected.encode('ascii') + actual = subject.encodeStringForB64Header('abcdef' * 500) + self.assertEqual(expected, actual) + + def test_makeb64urlsafe(self): + expected = b'-_' + actual = subject.makeB64UrlSafe('+/') + self.assertEqual(expected, actual) + + def test_makeb64urlunsafe(self): + expected = b'+/' + actual = subject.makeB64UrlUnsafe('-_') + self.assertEqual(expected, actual) + + +class TestSlugId(base.TCTest): + def test_slug_id_is_always_nice(self): + with mock.patch('uuid.uuid4') as p: + # first bit of uuid set, which should get unset + p.return_value = uuid.UUID('bed97923-7616-4ec8-85ed-4b695f67ac2e') + expected = b'Ptl5I3YWTsiF7UtpX2esLg' + actual = subject.slugId() + self.assertEqual(expected, actual) + + def test_slug_id_nice_stays_nice(self): + with mock.patch('uuid.uuid4') as p: + # first bit of uuid unset, should remain unset + p.return_value = uuid.UUID('3ed97923-7616-4ec8-85ed-4b695f67ac2e') + expected = b'Ptl5I3YWTsiF7UtpX2esLg' + actual = subject.slugId() + self.assertEqual(expected, actual) + + +class TestMakeSingleHttpRequest(base.TCTest): + def test_success_no_payload(self): + @httmock.all_requests + def response_content(url, request): + return {'status_code': 200, 'content': {}} + + with httmock.HTTMock(response_content): + d = subject.makeSingleHttpRequest('GET', 'http://www.example.com', {}, {}) + self.assertEqual(d.json(), {}) + self.assertEqual(d.status_code, 200) + d.raise_for_status() + + def test_success_payload(self): + @httmock.all_requests + def response_content(url, request): + self.assertEqual(request.body, 'i=j') + return {'status_code': 200, 'content': {'k': 'l'}} + + with httmock.HTTMock(response_content): + d = subject.makeSingleHttpRequest('GET', 'http://www.example.com', {'i': 'j'}, {}) + self.assertEqual(d.json(), {'k': 'l'}) + self.assertEqual(d.status_code, 200) + d.raise_for_status() + + def 
test_failure(self): + @httmock.all_requests + def response_content(url, requet): + return {'status_code': 404} + + with httmock.HTTMock(response_content): + d = subject.makeSingleHttpRequest('GET', 'http://www.example.com', {}, {}) + with self.assertRaises(requests.exceptions.RequestException): + d.raise_for_status() + + +class TestPutfile(base.TCTest): + def test_success_put_file(self): + with mock.patch.object(subject, 'makeSingleHttpRequest') as p: + class FakeResp: + status_code = 200 + + def raise_for_status(self): + pass + + p.return_value = FakeResp() + subject.putFile('setup.py', 'http://www.example.com', 'text/plain') + p.assert_called_once_with('put', 'http://www.example.com', mock.ANY, mock.ANY, mock.ANY) + + +class TestStableSlugIdClosure(TestCase): + + @given(st.text()) + def test_repeat(self, text): + s = subject.stableSlugId() + self.assertEqual(s(text), s(text)) + + def test_not_equal(self): + s = subject.stableSlugId() + self.assertNotEqual(s("first"), s("second")) + + @given(st.text()) + def test_invalidate(self, text): + s1 = subject.stableSlugId() + s2 = subject.stableSlugId() + self.assertNotEqual(s1(text), s2(text)) + + +class TestFromNow(TestCase): + + examples = [ + {"expr": '1 hour', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T17:27:20.974Z'}, + {"expr": '3h', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T19:27:20.974Z'}, + {"expr": '1 hours', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T17:27:20.974Z'}, + {"expr": '-1 hour', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T15:27:20.974Z'}, + {"expr": '1 m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:28:20.974Z'}, + {"expr": '1m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:28:20.974Z'}, + {"expr": '12 min', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:39:20.974Z'}, + {"expr": '12min', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:39:20.974Z'}, + {"expr": '11m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:38:20.974Z'}, + {"expr": '11 m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:38:20.974Z'}, + {"expr": '1 day', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-20T16:27:20.974Z'}, + {"expr": '2 days', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-21T16:27:20.974Z'}, + {"expr": '1 second', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:27:21.974Z'}, + {"expr": '1 week', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-26T16:27:20.974Z'}, + {"expr": '1 month', "from": '2017-01-19T16:27:20.974Z', "result": '2017-02-18T16:27:20.974Z'}, + {"expr": '30 mo', "from": '2017-01-19T16:27:20.974Z', "result": '2019-07-08T16:27:20.974Z'}, + {"expr": '-30 mo', "from": '2017-01-19T16:27:20.974Z', "result": '2014-08-03T16:27:20.974Z'}, + {"expr": '1 year', "from": '2017-01-19T16:27:20.974Z', "result": '2018-01-19T16:27:20.974Z'}, + ] + + def test_examples(self): + for example in self.examples: + from_ = dateutil.parser.parse(example['from']) + res = dateutil.parser.parse(example['result']) + self.assertEqual(subject.fromNow(example['expr'], from_), res) + + +class TestScopeMatch(TestCase): + def assertScopeMatch(self, assumed, requiredScopeSets, expected): + try: + result = subject.scopeMatch(assumed, requiredScopeSets) + self.assertEqual(result, expected) + except: + if expected != 'exception': + raise + + def test_single_exact_match_string_except_1(self): + self.assertScopeMatch(["foo:bar"], "foo:bar", "exception") + + def 
test_single_exact_match_string_except_2(self): + self.assertScopeMatch(["foo:bar"], ["foo:bar"], "exception") + + def test_single_exact_match_string(self): + self.assertScopeMatch(["foo:bar"], [["foo:bar"]], True) + + def test_empty_string_in_scopesets_except_1(self): + self.assertScopeMatch(["foo:bar"], "", "exception") + + def test_empty_string_in_scopesets_except_2(self): + self.assertScopeMatch(["foo:bar"], [""], "exception") + + def test_empty_string_in_scopesets(self): + self.assertScopeMatch(["foo:bar"], [[""]], False) + + def test_prefix(self): + self.assertScopeMatch(["foo:*"], [["foo:bar"]], True) + + def test_star_not_at_end(self): + self.assertScopeMatch(["foo:*:bing"], [["foo:bar:bing"]], False) + + def test_star_at_beginning(self): + self.assertScopeMatch(["*:bar"], [["foo:bar"]], False) + + def test_prefix_with_no_star(self): + self.assertScopeMatch(["foo:"], [["foo:bar"]], False) + + def test_star_but_not_prefix_1(self): + self.assertScopeMatch(["foo:bar:*"], [["bar:bing"]], False) + + def test_star_but_not_prefix_2(self): + self.assertScopeMatch(["bar:*"], [["foo:bar:bing"]], False) + + def test_disjunction_strings_except(self): + self.assertScopeMatch(["bar:*"], ["foo:x", "bar:x"], "exception") + + def test_disjunction_strings_2(self): + self.assertScopeMatch(["bar:*"], [["foo:x"], ["bar:x"]], True) + + def test_conjunction(self): + self.assertScopeMatch(["bar:*", "foo:x"], [["foo:x", "bar:y"]], True) + + def test_empty_pattern(self): + self.assertScopeMatch([""], [["foo:bar"]], False) + + def test_empty_patterns(self): + self.assertScopeMatch([], [["foo:bar"]], False) + + def test_bare_star(self): + self.assertScopeMatch(["*"], [["foo:bar", "bar:bing"]], True) + + def test_empty_conjunction_in_scopesets(self): + self.assertScopeMatch(["foo:bar"], [[]], True) + + def test_non_string_scopesets(self): + self.assertScopeMatch(["foo:bar"], {}, "exception") + + def test_non_string_scopeset(self): + self.assertScopeMatch(["foo:bar"], [{}], "exception") + + def test_non_string_scope(self): + self.assertScopeMatch(["foo:bar"], [[{}]], "exception") + + def test_empty_disjunction_in_scopesets(self): + self.assertScopeMatch(["foo:bar"], [], False) + + +class TestIsExpired(TestCase): + + def test_not_expired(self): + isExpired = subject.isExpired(""" + { + "version":1, + "scopes":["*"], + "start":1450740520182, + "expiry":2451000620182, + "seed":"90PyTwYxS96-lBPc0f_MqQGV-hHCUsTYWpXZilv6EqDg", + "signature":"HocA2IiCoGzjUQZbrbLSwKMXZSYWCu/hfMPCa/ovggQ=" + } + """) + self.assertEqual(isExpired, False) + + def test_expired(self): + # Warning: we have to test with expiry 0, as the magic python spy thing + # messes up time.time(), so it won't work otherwise.
+ isExpired = subject.isExpired(""" + { + "version":1, + "scopes":["*"], + "start":1450740520182, + "expiry":0, + "seed":"90PyTwYxS96-lBPc0f_MqQGV-hHCUsTYWpXZilv6EqDg", + "signature":"HocA2IiCoGzjUQZbrbLSwKMXZSYWCu/hfMPCa/ovggQ=" + } + """) + self.assertEqual(isExpired, True) + + +class TestFromEnv(TestCase): + + def clear_env(self): + for v in 'ROOT_URL', 'CLIENT_ID', 'ACCESS_TOKEN', 'CERTIFICATE': + v = 'TASKCLUSTER_' + v + if v in os.environ: + del os.environ[v] + + @mock.patch.dict(os.environ) + def test_empty(self): + self.clear_env() + self.assertEqual(subject.optionsFromEnvironment(), {}) + + @mock.patch.dict(os.environ) + def test_all(self): + os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com' + os.environ['TASKCLUSTER_CLIENT_ID'] = 'me' + os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut' + os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}' + self.assertEqual(subject.optionsFromEnvironment(), { + 'rootUrl': 'https://tc.example.com', + 'credentials': { + 'clientId': 'me', + 'accessToken': 'shave-and-a-haircut', + 'certificate': '{"bits":2}', + }, + }) + + @mock.patch.dict(os.environ) + def test_cred_only(self): + os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut' + self.assertEqual(subject.optionsFromEnvironment(), { + 'credentials': { + 'accessToken': 'shave-and-a-haircut', + }, + }) + + @mock.patch.dict(os.environ) + def test_rooturl_only(self): + os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com' + self.assertEqual(subject.optionsFromEnvironment(), { + 'rootUrl': 'https://tc.example.com', + }) + + @mock.patch.dict(os.environ) + def test_default_rooturl(self): + os.environ['TASKCLUSTER_CLIENT_ID'] = 'me' + os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut' + os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}' + self.assertEqual( + subject.optionsFromEnvironment({'rootUrl': 'https://other.example.com'}), { + 'rootUrl': 'https://other.example.com', + 'credentials': { + 'clientId': 'me', + 'accessToken': 'shave-and-a-haircut', + 'certificate': '{"bits":2}', + }, + }) + + @mock.patch.dict(os.environ) + def test_default_rooturl_overridden(self): + os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com' + self.assertEqual( + subject.optionsFromEnvironment({'rootUrl': 'https://other.example.com'}), + {'rootUrl': 'https://tc.example.com'}) + + @mock.patch.dict(os.environ) + def test_default_creds(self): + os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com' + os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut' + os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}' + self.assertEqual( + subject.optionsFromEnvironment({'credentials': {'clientId': 'them'}}), { + 'rootUrl': 'https://tc.example.com', + 'credentials': { + 'clientId': 'them', + 'accessToken': 'shave-and-a-haircut', + 'certificate': '{"bits":2}', + }, + }) + + @mock.patch.dict(os.environ) + def test_default_creds_overridden(self): + os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com' + os.environ['TASKCLUSTER_CLIENT_ID'] = 'me' + os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut' + os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}' + self.assertEqual( + subject.optionsFromEnvironment({'credentials': {'clientId': 'them'}}), { + 'rootUrl': 'https://tc.example.com', + 'credentials': { + 'clientId': 'me', + 'accessToken': 'shave-and-a-haircut', + 'certificate': '{"bits":2}', + }, + }) diff --git a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/RECORD 
b/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/RECORD deleted file mode 100644 index 9002609637c0..000000000000 --- a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/RECORD +++ /dev/null @@ -1,5 +0,0 @@ -taskcluster_urls/__init__.py,sha256=9MCKMyolP3tmuPgKCmUGHfy0CiiC0NtlxZdiq3GI8pQ,2596 -taskcluster_urls-11.0.0.dist-info/METADATA,sha256=BLxhzYGGt8v5mhLv_NLdnceZI91gcWGyXGcXYyAVNLo,7636 -taskcluster_urls-11.0.0.dist-info/RECORD,, -taskcluster_urls-11.0.0.dist-info/WHEEL,sha256=NzFAKnL7g-U64xnS1s5e3mJnxKpOTeOtlXdFwS9yNXI,92 -taskcluster_urls-11.0.0.dist-info/top_level.txt,sha256=ZOahZE9aH516RGht4_177HGJ9cJg6JgsD9PVUdwnATo,17 diff --git a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/top_level.txt b/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/top_level.txt deleted file mode 100644 index 99a7791a995c..000000000000 --- a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -taskcluster_urls diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/LICENSE.txt b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/LICENSE.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/LICENSE.txt rename to third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/LICENSE.txt diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA similarity index 72% rename from third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA index edde32eac073..f95073104453 100644 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA @@ -1,8 +1,8 @@ Metadata-Version: 2.1 -Name: platformdirs -Version: 2.0.2 +Name: appdirs +Version: 1.4.4 Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". 
-Home-page: https://github.com/platformdirs/platformdirs +Home-page: http://github.com/ActiveState/appdirs Author: Trent Mick Author-email: trentm@gmail.com Maintainer: Jeff Rouse @@ -14,29 +14,26 @@ Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* -License-File: LICENSE.txt +.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png + :target: http://travis-ci.org/ActiveState/appdirs + the problem =========== -.. image:: https://github.com/platformdirs/platformdirs/workflows/Test/badge.svg - :target: https://github.com/platformdirs/platformdirs/actions?query=workflow%3ATest - -What directory should your app use for storing user data? If running on macOS, you +What directory should your app use for storing user data? If running on Mac OS X, you should use:: ~/Library/Application Support/ @@ -49,19 +46,19 @@ or possibly:: C:\Documents and Settings\\Application Data\\ -for `roaming profiles `_ but that is another story. +for `roaming profiles `_ but that is another story. On Linux (and other Unices) the dir, according to the `XDG -spec `_, is:: +spec `_, is:: ~/.local/share/ -``platformdirs`` to the rescue -============================== +``appdirs`` to the rescue +========================= -This kind of thing is what the ``platformdirs`` module is for. -``platformdirs`` will help you choose an appropriate: +This kind of thing is what the ``appdirs`` module is for. 
``appdirs`` will +help you choose an appropriate: - user data dir (``user_data_dir``) - user config dir (``user_config_dir``) @@ -80,9 +77,9 @@ and also: some example output =================== -On macOS:: +On Mac OS X:: - >>> from platformdirs import * + >>> from appdirs import * >>> appname = "SuperApp" >>> appauthor = "Acme" >>> user_data_dir(appname, appauthor) @@ -96,7 +93,7 @@ On macOS:: On Windows 7:: - >>> from platformdirs import * + >>> from appdirs import * >>> appname = "SuperApp" >>> appauthor = "Acme" >>> user_data_dir(appname, appauthor) @@ -110,7 +107,7 @@ On Windows 7:: On Linux:: - >>> from platformdirs import * + >>> from appdirs import * >>> appname = "SuperApp" >>> appauthor = "Acme" >>> user_data_dir(appname, appauthor) @@ -132,13 +129,13 @@ On Linux:: '/etc/SuperApp:/usr/local/etc/SuperApp' -``PlatformDirs`` for convenience -================================ +``AppDirs`` for convenience +=========================== :: - >>> from platformdirs import PlatformDirs - >>> dirs = PlatformDirs("SuperApp", "Acme") + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme") >>> dirs.user_data_dir '/Users/trentm/Library/Application Support/SuperApp' >>> dirs.site_data_dir @@ -157,8 +154,8 @@ If you have multiple versions of your app in use that you want to be able to run side-by-side, then you may want version-isolation for these dirs:: - >>> from platformdirs import PlatformDirs - >>> dirs = PlatformDirs("SuperApp", "Acme", version="1.0") + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") >>> dirs.user_data_dir '/Users/trentm/Library/Application Support/SuperApp/1.0' >>> dirs.site_data_dir @@ -169,46 +166,19 @@ dirs:: '/Users/trentm/Library/Logs/SuperApp/1.0' -Why the Fork? -============= -This repository is a friendly fork of the wonderful work started by -`ActiveState `_ who created -``appdirs``, this package's ancestor. - -Maintaining an open source project is no easy task, particularly -from within an organization, and the Python community is indebted -to ``appdirs`` (and to Trent Mick and Jeff Rouse in particular) for -creating an incredibly useful simple module, as evidenced by the wide -number of users it has attracted over the years. - -Nonetheless, given the number of long-standing open issues -and pull requests, and no clear path towards `ensuring -that maintenance of the package would continue or grow -`_, this fork was -created. - -Contributions are most welcome. - - -platformdirs Changelog -====================== - -platformdirs 2.0.0 ------------------- - -- **BREAKING** Name change as part of the friendly fork -- **BREAKING** Remove support for end-of-life Pythons 2.6, 3.2, and 3.3 -- **BREAKING** Correct the config directory on OSX/macOS -- Add Python 3.7, 3.8, and 3.9 support +appdirs Changelog +================= appdirs 1.4.4 ------------- -- [PR #92] Don't import appdirs from setup.py which resolves issue #91 +- [PR #92] Don't import appdirs from setup.py Project officially classified as Stable which is important for inclusion in other distros such as ActivePython. +First of several incremental releases to catch up on maintenance. + appdirs 1.4.3 ------------- - [PR #76] Python 3.6 invalid escape sequence deprecation fixes @@ -256,7 +226,7 @@ appdirs 1.1.0 - [issue 4] Add ``AppDirs.user_log_dir``. - [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec - `_. + `_. - [Mac, issue 5] Fix ``site_data_dir()`` on Mac. 
- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports Python3 now. @@ -283,11 +253,12 @@ appdirs 1.0.1 (never released) ------------------------------ Started this changelog 27 July 2010. Before that this module originated in the -`Komodo `_ product as ``applib.py`` and then +`Komodo `_ product as ``applib.py`` and then as `applib/location.py -`_ (used by -`PyPM `_ in `ActivePython -`_). This is basically a fork of +`_ (used by +`PyPM `_ in `ActivePython +`_). This is basically a fork of applib.py 1.0.1 and applib/location.py 1.0.1. + diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD new file mode 100644 index 000000000000..9cbb30620ebf --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD @@ -0,0 +1,6 @@ +appdirs.py,sha256=g99s2sXhnvTEm79oj4bWI0Toapc-_SmKKNXvOXHkVic,24720 +appdirs-1.4.4.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097 +appdirs-1.4.4.dist-info/METADATA,sha256=k5TVfXMNKGHTfp2wm6EJKTuGwGNuoQR5TqQgH8iwG8M,8981 +appdirs-1.4.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +appdirs-1.4.4.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8 +appdirs-1.4.4.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/WHEEL similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/WHEEL diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt new file mode 100644 index 000000000000..d64bc321a11c --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt @@ -0,0 +1 @@ +appdirs diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs.py b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py similarity index 61% rename from third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs.py rename to third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py index 23c6af8c7127..2acd1debeb1d 100644 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs.py +++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py @@ -1,27 +1,28 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2005-2010 ActiveState Software Inc. # Copyright (c) 2013 Eddy Petrișor """Utilities for determining application-specific dirs. -See for details and usage. +See for details and usage. 
""" # Dev Notes: # - MSDN on where to store app data files: # http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html -# - XDG spec for Un*x: https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html -__version__ = "2.0.2" -__version_info__ = 2, 0, 2 +__version__ = "1.4.4" +__version_info__ = tuple(int(segment) for segment in __version__.split(".")) import sys import os -PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 -if not PY2: +if PY3: unicode = str if sys.platform.startswith('java'): @@ -40,346 +41,6 @@ else: system = sys.platform -# https://docs.python.org/dev/library/sys.html#sys.platform -if system == 'win32': - try: - from ctypes import windll - except ImportError: - try: - import com.sun.jna - except ImportError: - try: - if PY2: - import _winreg as winreg - else: - import winreg - except ImportError: - def _get_win_folder(csidl_name): - """Get folder from environment variables.""" - if csidl_name == 'CSIDL_APPDATA': - env_var_name = 'APPDATA' - elif csidl_name == 'CSIDL_COMMON_APPDATA': - env_var_name = 'ALLUSERSPROFILE' - elif csidl_name == 'CSIDL_LOCAL_APPDATA': - env_var_name = 'LOCALAPPDATA' - else: - raise ValueError('Unknown CSIDL name: {}'.format(csidl_name)) - - if env_var_name in os.environ: - return os.environ[env_var_name] - else: - raise ValueError('Unset environment variable: {}'.format(env_var_name)) - else: - def _get_win_folder(csidl_name): - """Get folder from the registry. - - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - if csidl_name == 'CSIDL_APPDATA': - shell_folder_name = 'AppData' - elif csidl_name == 'CSIDL_COMMON_APPDATA': - shell_folder_name = 'Common AppData' - elif csidl_name == 'CSIDL_LOCAL_APPDATA': - shell_folder_name = 'Local AppData' - else: - raise ValueError('Unknown CSIDL name: {}'.format(csidl_name)) - - key = winreg.OpenKey( - winreg.HKEY_CURRENT_USER, - r'Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders' - ) - directory, _ = winreg.QueryValueEx(key, shell_folder_name) - return directory - else: - def _get_win_folder_with_jna(csidl_name): - """Get folder with JNA.""" - import array - from com.sun import jna - from com.sun.jna.platform import win32 - - buf_size = win32.WinDef.MAX_PATH * 2 - buf = array.zeros('c', buf_size) - shell = win32.Shell32.INSTANCE - shell.SHGetFolderPath( - None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf - ) - directory = jna.Native.toString(buf.tostring()).rstrip('\0') - - # Downgrade to short path name if have highbit chars. See - # . 
- has_high_char = False - for c in directory: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf = array.zeros('c', buf_size) - kernel = win32.Kernel32.INSTANCE - if kernel.GetShortPathName(directory, buf, buf_size): - directory = jna.Native.toString(buf.tostring()).rstrip('\0') - - return directory - else: - def _get_win_folder(csidl_name): - """Get folder with ctypes.""" - import ctypes - - if csidl_name == 'CSIDL_APPDATA': - csidl_const = 26 - elif csidl_name == 'CSIDL_COMMON_APPDATA': - csidl_const = 35 - elif csidl_name == 'CSIDL_LOCAL_APPDATA': - csidl_const = 28 - else: - raise ValueError('Unknown CSIDL name: {}'.format(csidl_name)) - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - - def _user_data_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - if appauthor is None: - appauthor = appname - - const = 'CSIDL_APPDATA' if roaming else 'CSIDL_LOCAL_APPDATA' - path = os.path.normpath(_get_win_folder(const)) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - - if version: - path = os.path.join(path, version) - - return path - - def _site_data_dir_impl(appname=None, appauthor=None, version=None, multipath=False): - if appauthor is None: - appauthor = appname - - path = os.path.normpath(_get_win_folder('CSIDL_COMMON_APPDATA')) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - - if version: - path = os.path.join(path, version) - - return path - - def _user_config_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - return _user_data_dir_impl(appname=appname, appauthor=appauthor, version=version, roaming=roaming) - - def _site_config_dir_impl(appname=None, appauthor=None, version=None, multipath=False): - return _site_data_dir_impl(appname=appname, appauthor=appauthor, version=version) - - def _user_cache_dir_impl(appname=None, appauthor=None, version=None, opinion=True): - if appauthor is None: - appauthor = appname - - path = os.path.normpath(_get_win_folder('CSIDL_LOCAL_APPDATA')) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - - if opinion: - path = os.path.join(path, 'Cache') - - if version: - path = os.path.join(path, version) - - return path - - def _user_state_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - return _user_data_dir_impl(appname=appname, appauthor=appauthor, version=version, roaming=roaming) - - def _user_log_dir_impl(appname=None, appauthor=None, version=None, opinion=True): - path = _user_data_dir_impl(appname=appname, appauthor=appauthor, version=version) - if opinion: - path = os.path.join(path, 'Logs') - - return path - -elif system == 'darwin': - - def _user_data_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - path = os.path.expanduser('~/Library/Application Support/') - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def 
_site_data_dir_impl(appname=None, appauthor=None, version=None, multipath=False): - path = '/Library/Application Support' - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def _user_config_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - path = os.path.expanduser('~/Library/Preferences/') - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def _site_config_dir_impl(appname=None, appauthor=None, version=None, multipath=False): - path = '/Library/Preferences' - if appname: - path = os.path.join(path, appname) - - return path - - def _user_cache_dir_impl(appname=None, appauthor=None, version=None, opinion=True): - path = os.path.expanduser('~/Library/Caches') - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def _user_state_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - return _user_data_dir_impl(appname=appname, appauthor=appauthor, version=version, roaming=roaming) - - def _user_log_dir_impl(appname=None, appauthor=None, version=None, opinion=True): - path = os.path.expanduser('~/Library/Logs') - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - -else: - - def _user_data_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - if 'XDG_DATA_HOME' in os.environ: - path = os.environ['XDG_DATA_HOME'] - else: - path = os.path.expanduser('~/.local/share') - - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def _site_data_dir_impl(appname=None, appauthor=None, version=None, multipath=False): - # XDG default for $XDG_DATA_DIRS - # only first, if multipath is False - if 'XDG_DATA_DIRS' in os.environ: - path = os.environ['XDG_DATA_DIRS'] - else: - path = '/usr/local/share{}/usr/share'.format(os.pathsep) - - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.path.join(x, appname) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - - return path - - def _user_config_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - if 'XDG_CONFIG_HOME' in os.environ: - path = os.environ['XDG_CONFIG_HOME'] - else: - path = os.path.expanduser('~/.config') - - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def _site_config_dir_impl(appname=None, appauthor=None, version=None, multipath=False): - # XDG default for $XDG_CONFIG_DIRS - # only first, if multipath is False - if 'XDG_CONFIG_DIRS' in os.environ: - path = os.environ['XDG_CONFIG_DIRS'] - else: - path = '/etc/xdg' - - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.path.join(x, appname) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - - return path - - def _user_cache_dir_impl(appname=None, appauthor=None, version=None, opinion=True): - if 'XDG_CACHE_HOME' in os.environ: - path = os.environ['XDG_CACHE_HOME'] - else: - path = os.path.expanduser('~/.cache') - - if appname: - path = os.path.join(path, appname) - if version: - path = 
os.path.join(path, version) - - return path - - def _user_state_dir_impl(appname=None, appauthor=None, version=None, roaming=False): - if 'XDG_STATE_HOME' in os.environ: - path = os.environ['XDG_STATE_HOME'] - else: - path = os.path.expanduser('~/.local/state') - - if appname: - path = os.path.join(path, appname) - if version: - path = os.path.join(path, version) - - return path - - def _user_log_dir_impl(appname=None, appauthor=None, version=None, opinion=True): - path = _user_cache_dir_impl(appname=appname, appauthor=appauthor, version=version) - if opinion: - path = os.path.join(path, 'log') - - return path - def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific data dir for this application. @@ -413,7 +74,27 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". """ - return _user_data_dir_impl(appname=appname, appauthor=appauthor, version=version, roaming=roaming) + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): @@ -447,7 +128,39 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ - return _site_data_dir_impl(appname=appname, appauthor=appauthor, version=version, multipath=multipath) + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): @@ -472,14 +185,22 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): for a discussion of issues. Typical user config directories are: - Mac OS X: ~/Library/Preferences/ + Mac OS X: same as user_data_dir Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. 
That means, by default "~/.config/". """ - return _user_config_dir_impl(appname=appname, appauthor=appauthor, version=version, roaming=roaming) + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): @@ -512,7 +233,25 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False) WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ - return _site_config_dir_impl(appname=appname, appauthor=appauthor, version=version, multipath=multipath) + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): @@ -548,7 +287,28 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. """ - return _user_cache_dir_impl(appname=appname, appauthor=appauthor, version=version, opinion=opinion) + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): @@ -582,7 +342,15 @@ def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): That means, by default "~/.local/state/". """ - return _user_state_dir_impl(appname=appname, appauthor=appauthor, version=version, roaming=roaming) + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): @@ -617,10 +385,26 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. 
""" - return _user_log_dir_impl(appname=appname, appauthor=appauthor, version=version, opinion=opinion) + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path -class PlatformDirs(object): +class AppDirs(object): """Convenience wrapper for getting application dirs.""" def __init__(self, appname=None, appauthor=None, version=None, roaming=False, multipath=False): @@ -666,12 +450,130 @@ class PlatformDirs(object): version=self.version) -# Backwards compatibility with appdirs -AppDirs = PlatformDirs +#---- internal support stuff +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + if PY3: + import winreg as _winreg + else: + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # . 
+ has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernel.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code if __name__ == "__main__": - # ---- self test code appname = "MyApp" appauthor = "MyCompany" @@ -686,21 +588,21 @@ if __name__ == "__main__": print("-- app dirs %s --" % __version__) print("-- app dirs (with optional 'version')") - dirs = PlatformDirs(appname, appauthor, version="1.0") + dirs = AppDirs(appname, appauthor, version="1.0") for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (without optional 'version')") - dirs = PlatformDirs(appname, appauthor) + dirs = AppDirs(appname, appauthor) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (without optional 'appauthor')") - dirs = PlatformDirs(appname) + dirs = AppDirs(appname) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (with disabled 'appauthor')") - dirs = PlatformDirs(appname, appauthor=False) + dirs = AppDirs(appname, appauthor=False) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/METADATA deleted file mode 100644 index 6402f8182108..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/METADATA +++ /dev/null @@ -1,67 +0,0 @@ -Metadata-Version: 2.1 -Name: backports.entry-points-selectable -Version: 1.1.0 -Summary: Compatibility shim providing selectable entry points for older implementations -Home-page: https://github.com/jaraco/backports.entry_points_selectable -Author: Jason R. 
Coombs -Author-email: jaraco@jaraco.com -License: UNKNOWN -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 2 -Requires-Python: >=2.7 -License-File: LICENSE -Requires-Dist: importlib-metadata ; python_version < "3.8" -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' -Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' -Provides-Extra: testing -Requires-Dist: pytest (>=4.6) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' -Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy" and python_version < "3.10" and python_version >= "3") and extra == 'testing' -Requires-Dist: pytest-checkdocs (>=2.4) ; (python_version >= "3") and extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; (python_version >= "3") and extra == 'testing' - -.. image:: https://img.shields.io/pypi/v/backports.entry_points_selectable.svg - :target: `PyPI link`_ - -.. image:: https://img.shields.io/pypi/pyversions/backports.entry_points_selectable.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/backports.entry_points_selectable - -.. image:: https://github.com/jaraco/backports.entry_points_selectable/workflows/tests/badge.svg - :target: https://github.com/jaraco/backports.entry_points_selectable/actions?query=workflow%3A%22tests%22 - :alt: tests - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :target: https://github.com/psf/black - :alt: Code style: Black - -.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest -.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest - -.. image:: https://img.shields.io/badge/skeleton-2021-informational - :target: https://blog.jaraco.com/skeleton - -Compatibility shim to ease adoption of `importlib_metadata 3.6 `_. Supplies forward-compatibility of "selectable" entry points even on older versions of ``importlib_metadata`` and ``importlib.metadata``, and avoids usage that triggers `deprecation warnings `_. - -Use this shim for libraries or applications invoking ``entry_points()`` that run on Python older than 3.10 or where importlib_metadata is older than 3.6. In most cases, this shim is unnecessary and the easiest thing to do is simply require ``importlib_metadata >= 3.6`` on all Pythons (or only those prior to 3.10a7). In some environments, a library may be constrained on which versions of ``importlib_metadata`` can be required, so this library bridges that gap. - -To use this shim, add ``backports.entry_points_selectable`` to your project requirements. It will require ``importlib_metadata`` automatically where needed (prior to Python 3.8) but be satisfied by older versions. Projects should still require ``importlib_metadata`` as appropriate for API uses other than for ``entry_points``. - -Then in code, instead of ``from importlib.metadata import entry_points``, use:: - - from backports.entry_points_selectable import entry_points - -And then use the "selectable" features (pass keyword arguments to ``entry_points`` or invoke ``.select()`` on the result). 
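A minimal sketch of the "selectable" usage described just above, assuming the importlib_metadata >= 3.6 API that this shim forwards to (illustrative addition, not part of the vendored METADATA; ``console_scripts`` is simply the group used in the module's own doctests)::

    from backports.entry_points_selectable import entry_points

    # Filtering by keyword argument yields only entry points in that group.
    for ep in entry_points(group='console_scripts'):
        print(ep.name, ep.value)

    # Equivalently, take the unfiltered result and narrow it with .select().
    scripts = entry_points().select(group='console_scripts')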
- -This backport has a very lenient dependency on `importlib_metadata` for older Pythons and is a single module implementation. If adding a dependency is a concern, this module may be vendored into the downstream project. - - diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/RECORD deleted file mode 100644 index ea354ea65811..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/RECORD +++ /dev/null @@ -1,7 +0,0 @@ -backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81 -backports/entry_points_selectable.py,sha256=p6WRKhYFNocmWtTassxkXAbSwyqO01hJMq60y4rPyxE,7444 -backports.entry_points_selectable-1.1.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -backports.entry_points_selectable-1.1.0.dist-info/METADATA,sha256=A4EQx2Qnr7lDwW92E5aD2w1pr4Fq_EpcxVjO5l-4KUg,4078 -backports.entry_points_selectable-1.1.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 -backports.entry_points_selectable-1.1.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 -backports.entry_points_selectable-1.1.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/WHEEL deleted file mode 100644 index 01b8fc7d4a10..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/top_level.txt deleted file mode 100644 index 99d2be5b64d7..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -backports diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py b/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py deleted file mode 100644 index 0d1f7edf5dc6..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py 
b/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py deleted file mode 100644 index f459f535fde5..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py +++ /dev/null @@ -1,275 +0,0 @@ -""" ->>> hasattr(entry_points(), 'select') -True ->>> tuple(entry_points(group='console_scripts')) -(...) - -Some usage is deprecated and may emit deprecation warnings -on later versions. - ->>> import warnings ->>> warnings.filterwarnings('ignore', category=DeprecationWarning) - ->>> entry_points()['console_scripts'][0] -EntryPoint...(...) -""" - -import collections -import textwrap -import itertools -import operator -import functools - -try: - from itertools import filterfalse # type: ignore -except ImportError: - from itertools import ifilterfalse as filterfalse # type: ignore - - -try: - # prefer importlib_metadata if it has EntryPoints - import importlib_metadata as metadata # type: ignore - - if not hasattr(metadata, 'EntryPoints'): - raise ImportError("package without EntryPoints") - from importlib_metadata import distributions, EntryPoint # type: ignore -except ImportError: - try: - import importlib.metadata as metadata # type: ignore - from importlib.metadata import distributions, EntryPoint # type: ignore - except ImportError: - from importlib_metadata import distributions, EntryPoint # type: ignore - - -__all__ = ['entry_points'] - - -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - - -class Pair(collections.namedtuple('Pair', 'name value')): - @classmethod - def parse(cls, text): - return cls(*map(str.strip, text.split("=", 1))) - - -class Sectioned: - """ - A simple entry point config parser for performance - - >>> for item in Sectioned.read(Sectioned._sample): - ... 
print(item) - Pair(name='sec1', value='# comments ignored') - Pair(name='sec1', value='a = 1') - Pair(name='sec1', value='b = 2') - Pair(name='sec2', value='a = 2') - - >>> res = Sectioned.section_pairs(Sectioned._sample) - >>> item = next(res) - >>> item.name - 'sec1' - >>> item.value - Pair(name='a', value='1') - >>> item = next(res) - >>> item.value - Pair(name='b', value='2') - >>> item = next(res) - >>> item.name - 'sec2' - >>> item.value - Pair(name='a', value='2') - >>> list(res) - [] - """ - - _sample = textwrap.dedent( - """ - [sec1] - # comments ignored - a = 1 - b = 2 - - [sec2] - a = 2 - """ - ).lstrip() - - @classmethod - def section_pairs(cls, text): - return ( - section._replace(value=Pair.parse(section.value)) - for section in cls.read(text, filter_=cls.valid) - if section.name is not None - ) - - @staticmethod - def read(text, filter_=None): - lines = filter(filter_, map(str.strip, text.splitlines())) - name = None - for value in lines: - section_match = value.startswith('[') and value.endswith(']') - if section_match: - name = value.strip('[]') - continue - yield Pair(name, value) - - @staticmethod - def valid(line): - return line and not line.startswith('#') - - -def compat_matches(ep, **params): - try: - return ep.matches(**params) - except AttributeError: - pass - attrs = (getattr(ep, param) for param in params) - return all(map(operator.eq, params.values(), attrs)) - - -class EntryPoints(list): - """ - An immutable collection of selectable EntryPoint objects. - """ - - __slots__ = () - - def __getitem__(self, name): # -> EntryPoint: - """ - Get the EntryPoint in self matching name. - """ - if isinstance(name, int): - return super(EntryPoints, self).__getitem__(name) - try: - return next(iter(self.select(name=name))) - except StopIteration: - raise KeyError(name) - - def select(self, **params): - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - return EntryPoints(ep for ep in self if compat_matches(ep, **params)) - - @property - def names(self): - """ - Return the set of all names of all entry points. - """ - return set(ep.name for ep in self) - - @property - def groups(self): - """ - Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set(...) - """ - return set(ep.group for ep in self) - - @classmethod - def _from_text_for(cls, text, dist): - return cls(ep._for(dist) for ep in cls._from_text(text)) - - @classmethod - def _from_text(cls, text): - return itertools.starmap(EntryPoint, cls._parse_groups(text or '')) - - @staticmethod - def _parse_groups(text): - return ( - (item.value.name, item.value.value, item.name) - for item in Sectioned.section_pairs(text) - ) - - -class SelectableGroups(dict): - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - - @classmethod - def load(cls, eps): - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return cls((group, EntryPoints(eps)) for group, eps in grouped) - - @property - def _all(self): - """ - Reconstruct a list of all entrypoints from the groups. - """ - return EntryPoints(itertools.chain.from_iterable(self.values())) - - @property - def groups(self): - return self._all.groups - - @property - def names(self): - """ - for coverage: - >>> SelectableGroups().names - set(...) 
- """ - return self._all.names - - def select(self, **params): - if not params: - return self - return self._all.select(**params) - - -def entry_points_compat(**params): - """Return EntryPoint objects for all installed packages. - - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. - """ - - def dist_name(dist): - return dist.metadata['Name'] - - unique = functools.partial(unique_everseen, key=dist_name) - eps = itertools.chain.from_iterable( - dist.entry_points for dist in unique(distributions()) - ) - return SelectableGroups.load(eps).select(**params) - - -needs_backport = not hasattr(metadata, 'EntryPoints') or issubclass( - metadata.EntryPoints, tuple -) - -entry_points = entry_points_compat if needs_backport else metadata.entry_points diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA similarity index 97% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA index ea46c411e9de..54f5f6497f32 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: distlib -Version: 0.3.2 +Version: 0.3.1 Summary: Distribution utilities Description: Low-level components of distutils2/packaging, augmented with higher-level APIs for making packaging easier. 
Home-page: https://bitbucket.org/pypa/distlib @@ -21,4 +21,4 @@ Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Topic :: Software Development :: Libraries :: Python Modules -Download-URL: https://bitbucket.org/pypa/distlib/downloads/distlib-0.3.2.zip +Download-URL: https://bitbucket.org/pypa/distlib/downloads/distlib-0.3.1.zip diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD similarity index 51% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/RECORD rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD index 2ba735d3d0c5..93b724c474aa 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/RECORD +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD @@ -1,26 +1,26 @@ -distlib/__init__.py,sha256=bHNWOvZsLE4ES9S4FEA8CyP-rDYzatVgp9GHbpTnb2I,581 +distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581 distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408 distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 -distlib/index.py,sha256=UfcimNW19AB7IKWam4VaJbXuCBvArKfSxhV16EwavzE,20739 -distlib/locators.py,sha256=AKlB3oZvfOTg4E0CtfwOzujFL19X5V4XUA4eHdKOu44,51965 +distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066 +distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100 distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 -distlib/markers.py,sha256=OunMSH1SIbvLLt4z2VEERCll4WNlz2tDrg1mSXCNUj4,4344 -distlib/metadata.py,sha256=vatoxFdmBr6ie-sTVXVNPOPG3uwMDWJTnEECnm7xDCw,39109 -distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 -distlib/scripts.py,sha256=YD5_kioPD-qybYwQ4Gxyu-FR4ffxczy2gdBuU4II9qA,17248 +distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 +distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962 +distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 +distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180 distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768 distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984 -distlib/util.py,sha256=eIKKJ5Mp4unHMOVzixRIRxGq4ty5-h_PoFmZ_lpvkkM,67558 -distlib/version.py,sha256=_geOv-cHoV-G8dQzKI8g6z8F0XeFeUqdJ_1G1K6iyrQ,23508 +distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845 +distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112 distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840 -distlib/wheel.py,sha256=W6aQQo2Si0CzWiCaqlS-Nu8CoHnDbmcGMqRxCHJmg_Q,43062 +distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144 distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 
distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 -distlib-0.3.2.dist-info/METADATA,sha256=ZiUBpryb-__fVLO0q26MutK6IyLBrKMXBXTVye-NLrU,1128 -distlib-0.3.2.dist-info/WHEEL,sha256=drfrJ_lmbrASQtvw0M9reqwR5A6v9iRVE7Z0eesvh7Y,106 -distlib-0.3.2.dist-info/RECORD,, +distlib-0.3.1.dist-info/METADATA,sha256=i6wrPilVkro9BXvaHkwVsaemMZCx5xbWc8jS9oR_ZJw,1128 +distlib-0.3.1.dist-info/WHEEL,sha256=R4LNelR33E9ZPEGiPwrdPrrHnwkFEjiMPbVCAWVjsxI,106 +distlib-0.3.1.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL similarity index 55% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL index c5bac42be2b7..78f54a1910cf 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: distlib 0.3.2.dev0 +Generator: distlib 0.3.1.dev0 Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any \ No newline at end of file diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py similarity index 96% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py index 492c2c70584d..63d916e345b6 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.3.2' +__version__ = '0.3.1' class DistlibException(Exception): pass diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py 
b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.cfg b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.cfg similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.cfg rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.cfg diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py similarity index 96% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py index b1fbbf8e8d2a..7a87cdcf7a19 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py @@ -18,7 +18,7 @@ except ImportError: from . 
import DistlibException from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, urlparse, build_opener, string_types) -from .util import zip_dir, ServerProxy +from .util import cached_property, zip_dir, ServerProxy logger = logging.getLogger(__name__) @@ -67,17 +67,21 @@ class PackageIndex(object): Get the distutils command for interacting with PyPI configurations. :return: the command. """ - from .util import _get_pypirc_command as cmd - return cmd() + from distutils.core import Distribution + from distutils.config import PyPIRCCommand + d = Distribution() + return PyPIRCCommand(d) def read_configuration(self): """ - Read the PyPI access configuration as supported by distutils. This populates - ``username``, ``password``, ``realm`` and ``url`` attributes from the - configuration. + Read the PyPI access configuration as supported by distutils, getting + PyPI to do the actual work. This populates ``username``, ``password``, + ``realm`` and ``url`` attributes from the configuration. """ - from .util import _load_pypirc - cfg = _load_pypirc(self) + # get distutils to do the work + c = self._get_pypirc_command() + c.repository = self.url + cfg = c._read_pypirc() self.username = cfg.get('username') self.password = cfg.get('password') self.realm = cfg.get('realm', 'pypi') @@ -87,10 +91,13 @@ class PackageIndex(object): """ Save the PyPI access configuration. You must have set ``username`` and ``password`` attributes before calling this method. + + Again, distutils is used to do the actual work. """ self.check_credentials() - from .util import _store_pypirc - _store_pypirc(self) + # get distutils to do the work + c = self._get_pypirc_command() + c._store_pypirc(self.username, self.password) def check_credentials(self): """ diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py similarity index 98% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py index 0c7d6391438b..12a1d06351e5 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py @@ -20,14 +20,14 @@ import zlib from . 
import DistlibException from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, - queue, quote, unescape, build_opener, + queue, quote, unescape, string_types, build_opener, HTTPRedirectHandler as BaseRedirectHandler, text_type, Request, HTTPError, URLError) from .database import Distribution, DistributionPath, make_dist from .metadata import Metadata, MetadataInvalidError -from .util import (cached_property, ensure_slash, split_filename, get_project_data, - parse_requirement, parse_name_and_version, ServerProxy, - normalize_name) +from .util import (cached_property, parse_credentials, ensure_slash, + split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy, normalize_name) from .version import get_scheme, UnsupportedVersionError from .wheel import Wheel, is_compatible @@ -378,13 +378,13 @@ class Locator(object): continue try: if not matcher.match(k): - pass # logger.debug('%s did not match %r', matcher, k) + logger.debug('%s did not match %r', matcher, k) else: if prereleases or not vcls(k).is_prerelease: slist.append(k) - # else: - # logger.debug('skipping pre-release ' - # 'version %s of %s', k, matcher.name) + else: + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) except Exception: # pragma: no cover logger.warning('error matching %s with %r', matcher, k) pass # slist.append(k) @@ -593,7 +593,7 @@ class SimpleScrapingLocator(Locator): # These are used to deal with various Content-Encoding schemes. decoders = { 'deflate': zlib.decompress, - 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(d)).read(), 'none': lambda b: b, } @@ -1062,6 +1062,8 @@ default_locator = AggregatingLocator( locate = default_locator.locate +NAME_VERSION_RE = re.compile(r'(?P[\w-]+)\s*' + r'\(\s*(==\s*)?(?P[^)]+)\)$') class DependencyFinder(object): """ diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py similarity index 98% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py index 923a832b2a59..ee1f3e23655b 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py @@ -15,8 +15,9 @@ Parser for the environment markers micro-language defined in PEP 508. 
import os import sys import platform +import re -from .compat import string_types +from .compat import python_implementation, urlparse, string_types from .util import in_venv, parse_marker __all__ = ['interpret'] diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py similarity index 99% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py index 6a26b0ab232e..6d5e236090d2 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py @@ -94,9 +94,8 @@ _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', # See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in # the metadata. Include them in the tuple literal below to allow them # (for now). -# Ditto for Obsoletes - see issue #140. _566_FIELDS = _426_FIELDS + ('Description-Content-Type', - 'Requires', 'Provides', 'Obsoletes') + 'Requires', 'Provides') _566_MARKERS = ('Description-Content-Type',) @@ -118,8 +117,7 @@ def _version2fieldlist(version): elif version == '1.2': return _345_FIELDS elif version in ('1.3', '2.1'): - # avoid adding field names if already there - return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) + return _345_FIELDS + _566_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version) diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py similarity index 98% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py index fef52aa103ea..18840167a9e3 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py @@ -11,12 +11,13 @@ import io import logging import os import pkgutil +import shutil import sys import types import zipimport from . 
import DistlibException -from .util import cached_property, get_cache_base, Cache +from .util import cached_property, get_cache_base, path_to_cache_dir, Cache logger = logging.getLogger(__name__) @@ -282,7 +283,6 @@ class ZipResourceFinder(ResourceFinder): result = False return result - _finder_registry = { type(None): ResourceFinder, zipimport.zipimporter: ZipResourceFinder @@ -296,8 +296,6 @@ try: import _frozen_importlib as _fi _finder_registry[_fi.SourceFileLoader] = ResourceFinder _finder_registry[_fi.FileFinder] = ResourceFinder - # See issue #146 - _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder del _fi except (ImportError, AttributeError): pass @@ -306,7 +304,6 @@ except (ImportError, AttributeError): def register_finder(loader, finder_maker): _finder_registry[type(loader)] = finder_maker - _finder_cache = {} diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py similarity index 97% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py index 1ac01dde512e..03f8f21e0ff2 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py @@ -282,19 +282,6 @@ class ScriptMaker(object): self._fileop.set_executable_mode([outname]) filenames.append(outname) - variant_separator = '-' - - def get_script_filenames(self, name): - result = set() - if '' in self.variants: - result.add(name) - if 'X' in self.variants: - result.add('%s%s' % (name, self.version_info[0])) - if 'X.Y' in self.variants: - result.add('%s%s%s.%s' % (name, self.variant_separator, - self.version_info[0], self.version_info[1])) - return result - def _make_script(self, entry, filenames, options=None): post_interp = b'' if options: @@ -304,7 +291,15 @@ class ScriptMaker(object): post_interp = args.encode('utf-8') shebang = self._get_shebang('utf-8', post_interp, options=options) script = self._get_script_text(entry).encode('utf-8') - scriptnames = self.get_script_filenames(entry.name) + name = entry.name + scriptnames = set() + if '' in self.variants: + scriptnames.add(name) + if 'X' in self.variants: + scriptnames.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + scriptnames.add('%s-%s.%s' % (name, self.version_info[0], + self.version_info[1])) if options and options.get('gui', False): ext = 'pyw' else: @@ -331,7 +326,8 @@ class ScriptMaker(object): else: first_line = f.readline() if not first_line: # pragma: no cover - logger.warning('%s is an empty file (skipping)', script) + logger.warning('%s: %s is an empty file (skipping)', + self.get_command_name(), script) return match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/t32.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t32.exe similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/t32.exe rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t32.exe diff --git 
a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/t64.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t64.exe similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/t64.exe rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t64.exe diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py similarity index 88% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py index b9e2c695c98f..01324eae462f 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012-2021 The Python Software Foundation. +# Copyright (C) 2012-2017 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # import codecs @@ -309,9 +309,7 @@ def get_executable(): # else: # result = sys.executable # return result - # Avoid normcasing: see issue #143 - # result = os.path.normcase(sys.executable) - result = sys.executable + result = os.path.normcase(sys.executable) if not isinstance(result, text_type): result = fsdecode(result) return result @@ -1572,8 +1570,7 @@ class ServerProxy(xmlrpclib.ServerProxy): # The above classes only come into play if a timeout # is specified if timeout is not None: - # scheme = splittype(uri) # deprecated as of Python 3.8 - scheme = urlparse(uri)[0] + scheme, _ = splittype(uri) use_datetime = kwargs.get('use_datetime', 0) if scheme == 'https': tcls = SafeTransport @@ -1762,204 +1759,3 @@ def normalize_name(name): """Normalize a python package name a la PEP 503""" # https://www.python.org/dev/peps/pep-0503/#normalized-names return re.sub('[-_.]+', '-', name).lower() - -# def _get_pypirc_command(): - # """ - # Get the distutils command for interacting with PyPI configurations. - # :return: the command. 
- # """ - # from distutils.core import Distribution - # from distutils.config import PyPIRCCommand - # d = Distribution() - # return PyPIRCCommand(d) - -class PyPIRCFile(object): - - DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' - DEFAULT_REALM = 'pypi' - - def __init__(self, fn=None, url=None): - if fn is None: - fn = os.path.join(os.path.expanduser('~'), '.pypirc') - self.filename = fn - self.url = url - - def read(self): - result = {} - - if os.path.exists(self.filename): - repository = self.url or self.DEFAULT_REPOSITORY - - config = configparser.RawConfigParser() - config.read(self.filename) - sections = config.sections() - if 'distutils' in sections: - # let's get the list of servers - index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] - if _servers == []: - # nothing set, let's try to get the default pypi - if 'pypi' in sections: - _servers = ['pypi'] - else: - for server in _servers: - result = {'server': server} - result['username'] = config.get(server, 'username') - - # optional params - for key, default in (('repository', self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): - if config.has_option(server, key): - result[key] = config.get(server, key) - else: - result[key] = default - - # work around people having "repository" for the "pypi" - # section of their config set to the HTTP (rather than - # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): - result['repository'] = self.DEFAULT_REPOSITORY - elif (result['server'] != repository and - result['repository'] != repository): - result = {} - elif 'server-login' in sections: - # old format - server = 'server-login' - if config.has_option(server, 'repository'): - repository = config.get(server, 'repository') - else: - repository = self.DEFAULT_REPOSITORY - result = { - 'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM - } - return result - - def update(self, username, password): - # import pdb; pdb.set_trace() - config = configparser.RawConfigParser() - fn = self.filename - config.read(fn) - if not config.has_section('pypi'): - config.add_section('pypi') - config.set('pypi', 'username', username) - config.set('pypi', 'password', password) - with open(fn, 'w') as f: - config.write(f) - -def _load_pypirc(index): - """ - Read the PyPI access configuration as supported by distutils. - """ - return PyPIRCFile(url=index.url).read() - -def _store_pypirc(index): - PyPIRCFile().update(index.username, index.password) - -# -# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor -# tweaks -# - -def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) 
- solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - - """ - if os.name == 'nt': - if 'amd64' in sys.version.lower(): - return 'win-amd64' - if '(arm)' in sys.version.lower(): - return 'win-arm32' - if '(arm64)' in sys.version.lower(): - return 'win-arm64' - return sys.platform - - # Set for cross builds explicitly - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - - if os.name != 'posix' or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - - (osname, host, release, version, machine) = os.uname() - - # Convert the OS name to lowercase, remove '/' characters, and translate - # spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_').replace('/', '-') - - if osname[:5] == 'linux': - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - - elif osname[:5] == 'sunos': - if release[0] >= '5': # SunOS 5 == Solaris 2 - osname = 'solaris' - release = '%d.%s' % (int(release[0]) - 3, release[2:]) - # We can't use 'platform.architecture()[0]' because a - # bootstrap problem. We use a dict to get an error - # if some suspicious happens. - bitness = {2147483647:'32bit', 9223372036854775807:'64bit'} - machine += '.%s' % bitness[sys.maxsize] - # fall through to standard osname-release-machine representation - elif osname[:3] == 'aix': - from _aix_support import aix_platform - return aix_platform() - elif osname[:6] == 'cygwin': - osname = 'cygwin' - rel_re = re.compile (r'[\d.]+', re.ASCII) - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == 'darwin': - import _osx_support, distutils.sysconfig - osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), - osname, release, machine) - - return '%s-%s-%s' % (osname, release, machine) - - -_TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', -} - - -def get_platform(): - if os.name != 'nt': - return get_host_platform() - cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') - if cross_compilation_target not in _TARGET_TO_PLAT: - return get_host_platform() - return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py similarity index 99% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py index 86c069a7c2af..3eebe18ee849 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py @@ -710,9 +710,6 @@ class VersionScheme(object): """ Used for processing some metadata fields """ - # See issue #140. Be tolerant of a single trailing comma. 
- if s.endswith(','): - s = s[:-1] return self.is_valid_matcher('dummy_name (%s)' % s) def suggest(self, s): diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/w32.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w32.exe similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/w32.exe rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w32.exe diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/w64.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w64.exe similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/w64.exe rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w64.exe diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py similarity index 95% rename from third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py rename to third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py index 5262c8323eae..1e2c7a020c93 100644 --- a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py +++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013-2020 Vinay Sajip. +# Copyright (C) 2013-2017 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -9,6 +9,7 @@ from __future__ import unicode_literals import base64 import codecs import datetime +import distutils.util from email import message_from_file import hashlib import imp @@ -28,8 +29,7 @@ from .database import InstalledDistribution from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME) from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, - cached_property, get_cache_base, read_exports, tempdir, - get_platform) + cached_property, get_cache_base, read_exports, tempdir) from .version import NormalizedVersion, UnsupportedVersionError logger = logging.getLogger(__name__) @@ -47,18 +47,15 @@ else: VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') if not VER_SUFFIX: # pragma: no cover - if sys.version_info[1] >= 10: - VER_SUFFIX = '%s_%s' % sys.version_info[:2] # PEP 641 (draft) - else: - VER_SUFFIX = '%s%s' % sys.version_info[:2] + VER_SUFFIX = '%s%s' % sys.version_info[:2] PYVER = 'py' + VER_SUFFIX IMPVER = IMP_PREFIX + VER_SUFFIX -ARCH = get_platform().replace('-', '_').replace('.', '_') +ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') ABI = sysconfig.get_config_var('SOABI') if ABI and ABI.startswith('cpython-'): - ABI = ABI.replace('cpython-', 'cp').split('-')[0] + ABI = ABI.replace('cpython-', 'cp') else: def _derive_abi(): parts = ['cp', VER_SUFFIX] @@ -579,13 +576,6 @@ class Wheel(object): if not is_script: with zf.open(arcname) as bf: fileop.copy_stream(bf, outfile) - # Issue #147: permission bits aren't preserved. 
Using - # zf.extract(zinfo, libdir) should have worked, but didn't, - # see https://www.thetopsites.net/article/53834422.shtml - # So ... manually preserve permission bits as given in zinfo - if os.name == 'posix': - # just set the normal permission bits - os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) outfiles.append(outfile) # Double check the digest of the written file if not dry_run and row[1]: @@ -948,16 +938,6 @@ class Wheel(object): shutil.copyfile(newpath, pathname) return modified -def _get_glibc_version(): - import platform - ver = platform.libc_ver() - result = [] - if ver[0] == 'glibc': - for s in ver[1].split('.'): - result.append(int(s) if s.isdigit() else 0) - result = tuple(result) - return result - def compatible_tags(): """ Return (pyver, abi, arch) tuples compatible with this Python. @@ -1005,23 +985,6 @@ def compatible_tags(): for abi in abis: for arch in arches: result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) - # manylinux - if abi != 'none' and sys.platform.startswith('linux'): - arch = arch.replace('linux_', '') - parts = _get_glibc_version() - if len(parts) == 2: - if parts >= (2, 5): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux1_%s' % arch)) - if parts >= (2, 12): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux2010_%s' % arch)) - if parts >= (2, 17): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux2014_%s' % arch)) - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux_%s_%s_%s' % (parts[0], parts[1], - arch))) # where no ABI / arch dependency, but IMP_PREFIX dependency for i, version in enumerate(versions): @@ -1034,7 +997,6 @@ def compatible_tags(): result.append((''.join(('py', version)), 'none', 'any')) if i == 0: result.append((''.join(('py', version[0])), 'none', 'any')) - return set(result) diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA index 75053bdcfe62..79d8d47990ea 100644 --- a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA @@ -24,7 +24,6 @@ Classifier: Topic :: System Classifier: Topic :: Internet Classifier: Topic :: Software Development :: Libraries Description-Content-Type: text/markdown -License-File: LICENSE # py-filelock diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD index 007e582b08ee..c5f2e1f5cf4b 100644 --- a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD +++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD @@ -1,6 +1,6 @@ filelock.py,sha256=5DQTtOaQq7-vgLkZzvOhqhVMh_umfydWgSA8Vuzmf8M,13229 filelock-3.0.12.dist-info/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210 -filelock-3.0.12.dist-info/METADATA,sha256=KYeYgE0nuoXtRp8NVK-5w4B0b_1HGjugDDScxhiBjFU,4365 -filelock-3.0.12.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +filelock-3.0.12.dist-info/METADATA,sha256=gjzbv9nxtD-Rj2ysjUuG7SLZCHUQl5hMy68Jij8soPw,4343 
+filelock-3.0.12.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92 filelock-3.0.12.dist-info/top_level.txt,sha256=NDrf9i5BNogz4hEdsr6Hi7Ws3TlSSKY4Q2Y9_-i2GwU,9 filelock-3.0.12.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL index 385faab0525c..83ff02e961fc 100644 --- a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) +Generator: bdist_wheel (0.35.1) Root-Is-Purelib: true Tag: py3-none-any diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/LICENSE diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA new file mode 100644 index 000000000000..165a67ded5e7 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA @@ -0,0 +1,65 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 1.1.3 +Summary: Read metadata from Python packages +Home-page: http://importlib-metadata.readthedocs.io/ +Author: Barry Warsaw +Author-email: barry@python.org +License: Apache Software License +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Topic :: Software Development :: Libraries +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2 +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7 +Requires-Dist: zipp (>=0.5) +Requires-Dist: contextlib2 ; python_version < "3" +Requires-Dist: configparser (>=3.5) ; python_version < "3" +Requires-Dist: pathlib2 ; python_version == "3.4.*" or python_version < "3" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: rst.linker ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: importlib-resources ; (python_version < "3.7") and extra == 'testing' + +========================= + ``importlib_metadata`` +========================= + +``importlib_metadata`` is a library to access the metadata for a Python +package. It is intended to be ported to Python 3.8. + + +Usage +===== + +See the `online documentation `_ +for usage details. + +`Finder authors +`_ can +also add support for custom package installers. See the above documentation +for details. 
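As an illustrative sketch (not part of the vendored file), the core query API of this package looks roughly like the following; ``zipp`` is only a stand-in distribution name, chosen because it is a dependency of this backport and therefore usually installed::

    import importlib_metadata

    # Look up an installed distribution by name.
    try:
        dist = importlib_metadata.distribution('zipp')
    except importlib_metadata.PackageNotFoundError:
        dist = None

    if dist is not None:
        print(dist.version)                  # the 'Version' metadata field
        print(dist.metadata['Summary'])      # parsed METADATA / PKG-INFO mapping
        for ep in dist.entry_points:         # EntryPoint(name, value, group) tuples
            print(ep.group, ep.name, ep.value)
        files = dist.files or []             # PackagePath entries from RECORD, or None
        print(len(files))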
+ + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. +- Packages installed without metadata. + +Project details +=============== + + * Project home: https://gitlab.com/python-devs/importlib_metadata + * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues + * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git + * Documentation: http://importlib_metadata.readthedocs.io/ + + diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD new file mode 100644 index 000000000000..d0eac8875c24 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD @@ -0,0 +1,21 @@ +importlib_metadata/__init__.py,sha256=wjIJ8vwgfW6r1J8Yckbk2mqOk_ZDPe7fQvsDj1oG-aQ,16840 +importlib_metadata/_compat.py,sha256=EwnYmvejrDFHENaQEutLz7L1rvyK6jJv9-xwk_bWVTI,4265 +importlib_metadata/docs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata/docs/changelog.rst,sha256=QZ-WVYSgPlbeva4C8z5o58Ufpku4_JGfEOvEoPK-qt4,7086 +importlib_metadata/docs/conf.py,sha256=DM_-W8bvIar_YqWeRQUcgWT1_phXe-H2IcYgM8JIkiY,5468 +importlib_metadata/docs/index.rst,sha256=bHIGj1koPACV8OV02uHTGRMax46lGj00KLOji1aPl_c,2165 +importlib_metadata/docs/using.rst,sha256=2S6KGhJ66t8kM3cik7K03X1AJUGX0TWr6byaHEsJjnc,9826 +importlib_metadata/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata/tests/fixtures.py,sha256=sshuoJ4ezljeouUddVg-76K1UOStKWBecovZOKOBguk,5004 +importlib_metadata/tests/test_api.py,sha256=YMAGTsRENrtvpw2CSLmRndJMBeT4q_M0GSe-QsnnMZ4,5544 +importlib_metadata/tests/test_integration.py,sha256=kzqav9qAePjz7UR-GNna65xLwXlRcxEDYDwmuOFwpKE,686 +importlib_metadata/tests/test_main.py,sha256=nnKTmcIA14lhynepCfXtiTYWH35hNFuFfIcKBkzShuY,7179 +importlib_metadata/tests/test_zip.py,sha256=qG3IquiTFLSrUtpxEJblqiUtgEcOTfjU2yM35REk0fo,2372 +importlib_metadata/tests/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata/tests/data/example-21.12-py3-none-any.whl,sha256=I-kYufETid-tDYyR8f1OFJ3t5u_Io23k0cbQxJTUN4I,1455 +importlib_metadata/tests/data/example-21.12-py3.6.egg,sha256=-EeugFAijkdUO9xyQHTZkQwZoFXK0_QxICBj6R5AAJo,1497 +importlib_metadata-1.1.3.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 +importlib_metadata-1.1.3.dist-info/METADATA,sha256=zI5ihvOML51dmmsBF9_GrpnlUCgU8PTWXYa0Eb47nZU,2114 +importlib_metadata-1.1.3.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +importlib_metadata-1.1.3.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata-1.1.3.dist-info/RECORD,, diff --git a/third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/WHEEL similarity index 100% rename from third_party/python/pyasn1_modules/pyasn1_modules-0.2.8.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/WHEEL diff --git 
a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/top_level.txt rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py new file mode 100644 index 000000000000..31ff8462f3e5 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py @@ -0,0 +1,541 @@ +from __future__ import unicode_literals, absolute_import + +import io +import os +import re +import abc +import csv +import sys +import zipp +import operator +import functools +import itertools +import collections + +from ._compat import ( + install, + NullFinder, + ConfigParser, + suppress, + map, + FileNotFoundError, + IsADirectoryError, + NotADirectoryError, + PermissionError, + pathlib, + PYPY_OPEN_BUG, + ModuleNotFoundError, + MetaPathFinder, + email_message_from_string, + ensure_is_path, + PyPy_repr, + ) +from importlib import import_module +from itertools import starmap + + +__metaclass__ = type + + +__all__ = [ + 'Distribution', + 'DistributionFinder', + 'PackageNotFoundError', + 'distribution', + 'distributions', + 'entry_points', + 'files', + 'metadata', + 'requires', + 'version', + ] + + +class PackageNotFoundError(ModuleNotFoundError): + """The package was not found.""" + + +class EntryPoint( + PyPy_repr, + collections.namedtuple('EntryPointBase', 'name value group')): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + `_ + for more information. + """ + + pattern = re.compile( + r'(?P[\w.]+)\s*' + r'(:\s*(?P[\w.]+))?\s*' + r'(?P\[.*\])?\s*$' + ) + """ + A regular expression describing the syntax for an entry point, + which might look like: + + - module + - package.module + - package.module:attribute + - package.module:object.attribute + - package.module:attr [extra1, extra2] + + Other combinations are possible as well. + + The expression is lenient about whitespace around the ':', + following the attr, and following any extras. + """ + + def load(self): + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object. 
+ """ + match = self.pattern.match(self.value) + module = import_module(match.group('module')) + attrs = filter(None, (match.group('attr') or '').split('.')) + return functools.reduce(getattr, attrs, module) + + @property + def extras(self): + match = self.pattern.match(self.value) + return list(re.finditer(r'\w+', match.group('extras') or '')) + + @classmethod + def _from_config(cls, config): + return [ + cls(name, value, group) + for group in config.sections() + for name, value in config.items(group) + ] + + @classmethod + def _from_text(cls, text): + config = ConfigParser(delimiters='=') + # case sensitive: https://stackoverflow.com/q/1611799/812183 + config.optionxform = str + try: + config.read_string(text) + except AttributeError: # pragma: nocover + # Python 2 has no read_string + config.readfp(io.StringIO(text)) + return EntryPoint._from_config(config) + + def __iter__(self): + """ + Supply iter so one may construct dicts of EntryPoints easily. + """ + return iter((self.name, self)) + + def __reduce__(self): + return ( + self.__class__, + (self.name, self.value, self.group), + ) + + +class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package""" + + def read_text(self, encoding='utf-8'): + with self.locate().open(encoding=encoding) as stream: + return stream.read() + + def read_binary(self): + with self.locate().open('rb') as stream: + return stream.read() + + def locate(self): + """Return a path-like object for this path""" + return self.dist.locate_file(self) + + +class FileHash: + def __init__(self, spec): + self.mode, _, self.value = spec.partition('=') + + def __repr__(self): + return ''.format(self.mode, self.value) + + +class Distribution: + """A Python distribution package.""" + + @abc.abstractmethod + def read_text(self, filename): + """Attempt to load metadata file given by the name. + + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ + + @abc.abstractmethod + def locate_file(self, path): + """ + Given a path to a file in this distribution, return a path + to it. + """ + + @classmethod + def from_name(cls, name): + """Return the Distribution for the given package name. + + :param name: The name of the distribution package to search for. + :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + """ + for resolver in cls._discover_resolvers(): + dists = resolver(DistributionFinder.Context(name=name)) + dist = next(dists, None) + if dist is not None: + return dist + else: + raise PackageNotFoundError(name) + + @classmethod + def discover(cls, **kwargs): + """Return an iterable of Distribution objects for all packages. + + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for all packages. 
+ """ + context = kwargs.pop('context', None) + if context and kwargs: + raise ValueError("cannot accept context and kwargs") + context = context or DistributionFinder.Context(**kwargs) + return itertools.chain.from_iterable( + resolver(context) + for resolver in cls._discover_resolvers() + ) + + @staticmethod + def at(path): + """Return a Distribution for the indicated metadata path + + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ + return PathDistribution(ensure_is_path(path)) + + @staticmethod + def _discover_resolvers(): + """Search the meta_path for resolvers.""" + declared = ( + getattr(finder, 'find_distributions', None) + for finder in sys.meta_path + ) + return filter(None, declared) + + @property + def metadata(self): + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + text = ( + self.read_text('METADATA') + or self.read_text('PKG-INFO') + # This last clause is here to support old egg-info files. Its + # effect is to just end up using the PathDistribution's self._path + # (which points to the egg-info file) attribute unchanged. + or self.read_text('') + ) + return email_message_from_string(text) + + @property + def version(self): + """Return the 'Version' metadata for the distribution package.""" + return self.metadata['Version'] + + @property + def entry_points(self): + return EntryPoint._from_text(self.read_text('entry_points.txt')) + + @property + def files(self): + """Files in this distribution. + + :return: List of PackagePath for this distribution or None + + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is + missing. + Result may be empty if the metadata exists but is empty. + """ + file_lines = self._read_files_distinfo() or self._read_files_egginfo() + + def make_file(name, hash=None, size_str=None): + result = PackagePath(name) + result.hash = FileHash(hash) if hash else None + result.size = int(size_str) if size_str else None + result.dist = self + return result + + return file_lines and list(starmap(make_file, csv.reader(file_lines))) + + def _read_files_distinfo(self): + """ + Read the lines of RECORD + """ + text = self.read_text('RECORD') + return text and text.splitlines() + + def _read_files_egginfo(self): + """ + SOURCES.txt might contain literal commas, so wrap each line + in quotes. 
+ """ + text = self.read_text('SOURCES.txt') + return text and map('"{}"'.format, text.splitlines()) + + @property + def requires(self): + """Generated requirements specified for this Distribution""" + reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() + return reqs and list(reqs) + + def _read_dist_info_reqs(self): + return self.metadata.get_all('Requires-Dist') + + def _read_egg_info_reqs(self): + source = self.read_text('requires.txt') + return source and self._deps_from_requires_text(source) + + @classmethod + def _deps_from_requires_text(cls, source): + section_pairs = cls._read_sections(source.splitlines()) + sections = { + section: list(map(operator.itemgetter('line'), results)) + for section, results in + itertools.groupby(section_pairs, operator.itemgetter('section')) + } + return cls._convert_egg_info_reqs_to_simple_reqs(sections) + + @staticmethod + def _read_sections(lines): + section = None + for line in filter(None, lines): + section_match = re.match(r'\[(.*)\]$', line) + if section_match: + section = section_match.group(1) + continue + yield locals() + + @staticmethod + def _convert_egg_info_reqs_to_simple_reqs(sections): + """ + Historically, setuptools would solicit and store 'extra' + requirements, including those with environment markers, + in separate sections. More modern tools expect each + dependency to be defined separately, with any relevant + extras and environment markers attached directly to that + requirement. This method converts the former to the + latter. See _test_deps_from_requires_text for an example. + """ + def make_condition(name): + return name and 'extra == "{name}"'.format(name=name) + + def parse_condition(section): + section = section or '' + extra, sep, markers = section.partition(':') + if extra and markers: + markers = '({markers})'.format(markers=markers) + conditions = list(filter(None, [markers, make_condition(extra)])) + return '; ' + ' and '.join(conditions) if conditions else '' + + for section, deps in sections.items(): + for dep in deps: + yield dep + parse_condition(section) + + +class DistributionFinder(MetaPathFinder): + """ + A MetaPathFinder capable of discovering installed distributions. + """ + + class Context: + + name = None + """ + Specific name for which a distribution finder should match. + """ + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + @property + def path(self): + """ + The path that a distribution finder should search. + """ + return vars(self).get('path', sys.path) + + @property + def pattern(self): + return '.*' if self.name is None else re.escape(self.name) + + @abc.abstractmethod + def find_distributions(self, context=Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ + + +@install +class MetadataPathFinder(NullFinder, DistributionFinder): + """A degenerate finder for distribution packages on the file system. + + This finder supplies only a find_distributions() method for versions + of Python that do not have a PathFinder find_distributions(). + """ + + def find_distributions(self, context=DistributionFinder.Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. 
+ """ + found = self._search_paths(context.pattern, context.path) + return map(PathDistribution, found) + + @classmethod + def _search_paths(cls, pattern, paths): + """Find metadata directories in paths heuristically.""" + return itertools.chain.from_iterable( + cls._search_path(path, pattern) + for path in map(cls._switch_path, paths) + ) + + @staticmethod + def _switch_path(path): + if not PYPY_OPEN_BUG or os.path.isfile(path): # pragma: no branch + with suppress(Exception): + return zipp.Path(path) + return pathlib.Path(path) + + @classmethod + def _matches_info(cls, normalized, item): + template = r'{pattern}(-.*)?\.(dist|egg)-info' + manifest = template.format(pattern=normalized) + return re.match(manifest, item.name, flags=re.IGNORECASE) + + @classmethod + def _matches_legacy(cls, normalized, item): + template = r'{pattern}-.*\.egg[\\/]EGG-INFO' + manifest = template.format(pattern=normalized) + return re.search(manifest, str(item), flags=re.IGNORECASE) + + @classmethod + def _search_path(cls, root, pattern): + if not root.is_dir(): + return () + normalized = pattern.replace('-', '_') + return (item for item in root.iterdir() + if cls._matches_info(normalized, item) + or cls._matches_legacy(normalized, item)) + + +class PathDistribution(Distribution): + def __init__(self, path): + """Construct a distribution from a path to the metadata directory. + + :param path: A pathlib.Path or similar object supporting + .joinpath(), __div__, .parent, and .read_text(). + """ + self._path = path + + def read_text(self, filename): + with suppress(FileNotFoundError, IsADirectoryError, KeyError, + NotADirectoryError, PermissionError): + return self._path.joinpath(filename).read_text(encoding='utf-8') + read_text.__doc__ = Distribution.read_text.__doc__ + + def locate_file(self, path): + return self._path.parent / path + + +def distribution(distribution_name): + """Get the ``Distribution`` instance for the named package. + + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + return Distribution.from_name(distribution_name) + + +def distributions(**kwargs): + """Get all ``Distribution`` instances in the current environment. + + :return: An iterable of ``Distribution`` instances. + """ + return Distribution.discover(**kwargs) + + +def metadata(distribution_name): + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: An email.Message containing the parsed metadata. + """ + return Distribution.from_name(distribution_name).metadata + + +def version(distribution_name): + """Get the version string for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + return distribution(distribution_name).version + + +def entry_points(): + """Return EntryPoint objects for all installed packages. + + :return: EntryPoint objects for all installed packages. + """ + eps = itertools.chain.from_iterable( + dist.entry_points for dist in distributions()) + by_group = operator.attrgetter('group') + ordered = sorted(eps, key=by_group) + grouped = itertools.groupby(ordered, by_group) + return { + group: tuple(eps) + for group, eps in grouped + } + + +def files(distribution_name): + """Return a list of files for the named package. + + :param distribution_name: The name of the distribution package to query. 
+ :return: List of files composing the distribution. + """ + return distribution(distribution_name).files + + +def requires(distribution_name): + """ + Return a list of requirements for the named package. + + :return: An iterator of requirements, suitable for + packaging.requirement.Requirement. + """ + return distribution(distribution_name).requires + + +__version__ = version(__name__) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py new file mode 100644 index 000000000000..6e663662d280 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py @@ -0,0 +1,143 @@ +from __future__ import absolute_import + +import io +import abc +import sys +import email + + +if sys.version_info > (3,): # pragma: nocover + import builtins + from configparser import ConfigParser + from contextlib import suppress + FileNotFoundError = builtins.FileNotFoundError + IsADirectoryError = builtins.IsADirectoryError + NotADirectoryError = builtins.NotADirectoryError + PermissionError = builtins.PermissionError + map = builtins.map +else: # pragma: nocover + from backports.configparser import ConfigParser + from itertools import imap as map # type: ignore + from contextlib2 import suppress # noqa + FileNotFoundError = IOError, OSError + IsADirectoryError = IOError, OSError + NotADirectoryError = IOError, OSError + PermissionError = IOError, OSError + +if sys.version_info > (3, 5): # pragma: nocover + import pathlib +else: # pragma: nocover + import pathlib2 as pathlib + +try: + ModuleNotFoundError = builtins.FileNotFoundError +except (NameError, AttributeError): # pragma: nocover + ModuleNotFoundError = ImportError # type: ignore + + +if sys.version_info >= (3,): # pragma: nocover + from importlib.abc import MetaPathFinder +else: # pragma: nocover + class MetaPathFinder(object): + __metaclass__ = abc.ABCMeta + + +__metaclass__ = type +__all__ = [ + 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError', + 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError', + 'NotADirectoryError', 'email_message_from_string', + ] + + +def install(cls): + """ + Class decorator for installation on sys.meta_path. + + Adds the backport DistributionFinder to sys.meta_path and + attempts to disable the finder functionality of the stdlib + DistributionFinder. + """ + sys.meta_path.append(cls()) + disable_stdlib_finder() + return cls + + +def disable_stdlib_finder(): + """ + Give the backport primacy for discovering path-based distributions + by monkey-patching the stdlib O_O. + + See #91 for more background for rationale on this sketchy + behavior. + """ + def matches(finder): + return ( + finder.__module__ == '_frozen_importlib_external' + and hasattr(finder, 'find_distributions') + ) + for finder in filter(matches, sys.meta_path): # pragma: nocover + del finder.find_distributions + + +class NullFinder: + """ + A "Finder" (aka "MetaClassFinder") that never finds any modules, + but may find distributions. + """ + @staticmethod + def find_spec(*args, **kwargs): + return None + + # In Python 2, the import system requires finders + # to have a find_module() method, but this usage + # is deprecated in Python 3 in favor of find_spec(). + # For the purposes of this finder (i.e. 
being present + # on sys.meta_path but having no other import + # system functionality), the two methods are identical. + find_module = find_spec + + +def py2_message_from_string(text): # nocoverpy3 + # Work around https://bugs.python.org/issue25545 where + # email.message_from_string cannot handle Unicode on Python 2. + io_buffer = io.StringIO(text) + return email.message_from_file(io_buffer) + + +email_message_from_string = ( + py2_message_from_string + if sys.version_info < (3,) else + email.message_from_string + ) + +# https://bitbucket.org/pypy/pypy/issues/3021/ioopen-directory-leaks-a-file-descriptor +PYPY_OPEN_BUG = getattr(sys, 'pypy_version_info', (9, 9, 9))[:3] <= (7, 1, 1) + + +def ensure_is_path(ob): + """Construct a Path from ob even if it's already one. + Specialized for Python 3.4. + """ + if (3,) < sys.version_info < (3, 5): + ob = str(ob) # pragma: nocover + return pathlib.Path(ob) + + +class PyPy_repr: + """ + Override repr for EntryPoint objects on PyPy to avoid __iter__ access. + Ref #97, #102. + """ + affected = hasattr(sys, 'pypy_version_info') + + def __compat_repr__(self): # pragma: nocover + def make_param(name): + value = getattr(self, name) + return '{name}={value!r}'.format(**locals()) + params = ', '.join(map(make_param, self._fields)) + return 'EntryPoint({params})'.format(**locals()) + + if affected: # pragma: nocover + __repr__ = __compat_repr__ + del affected diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE new file mode 100644 index 000000000000..be7e092b0b05 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Jason R. Coombs, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
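The module added above is the copy of ``importlib_metadata`` that virtualenv bundles for older Pythons; its public surface is the handful of functions listed in ``__all__``. As a quick orientation, a minimal usage sketch might look like the following (the distribution name ``zipp`` is only an illustrative assumption about what happens to be installed):

    # Hedged sketch of the vendored importlib_metadata API shown above.
    # 'zipp' is an arbitrary example of an installed distribution.
    from importlib_metadata import (
        PackageNotFoundError, entry_points, metadata, requires, version,
    )

    print(version('zipp'))              # version string from the 'Version' field
    print(metadata('zipp')['Summary'])  # metadata is parsed into an email.Message
    print(requires('zipp'))             # list of requirement strings, or None

    # In this version entry_points() takes no arguments and returns a plain
    # dict mapping group names to tuples of EntryPoint namedtuples.
    for ep in entry_points().get('console_scripts', ()):
        print(ep.name, '->', ep.value)

    try:
        version('not-a-real-distribution')
    except PackageNotFoundError:
        pass  # raised when no matching *.dist-info / *.egg-info is found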
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA similarity index 65% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA index fdbfe1e66cf0..ce9f563a781a 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: importlib-metadata -Version: 4.6.2 +Version: 3.1.1 Summary: Read metadata from Python packages Home-page: https://github.com/python/importlib_metadata Author: Jason R. Coombs @@ -13,28 +13,23 @@ Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Requires-Python: >=3.6 -License-File: LICENSE Requires-Dist: zipp (>=0.5) -Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" Provides-Extra: docs Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' -Provides-Extra: perf -Requires-Dist: ipython ; extra == 'perf' Provides-Extra: testing -Requires-Dist: pytest (>=4.6) ; extra == 'testing' -Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing' Requires-Dist: pytest-flake8 ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: jaraco.test (>=3.2.0) ; extra == 'testing' Requires-Dist: packaging ; extra == 'testing' Requires-Dist: pep517 ; extra == 'testing' Requires-Dist: pyfakefs ; extra == 'testing' Requires-Dist: flufl.flake8 ; extra == 'testing' -Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' -Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/importlib_metadata.svg @@ -45,9 +40,9 @@ Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra .. _PyPI link: https://pypi.org/project/importlib_metadata -.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg - :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 - :alt: tests +.. 
image:: https://github.com/python/importlib_metadata/workflows/Automated%20Tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22Automated+Tests%22 + :alt: Automated Tests .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black @@ -56,33 +51,15 @@ Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra .. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2021-informational - :target: https://blog.jaraco.com/skeleton +``importlib_metadata`` is a library to access the metadata for a +Python package. -Library to access the metadata for a Python package. - -This package supplies third-party access to the functionality of -`importlib.metadata `_ -including improvements added to subsequent Python versions. - - -Compatibility -============= - -New features are introduced in this third-party library and later merged -into CPython. The following table indicates which versions of this library -were contributed to different versions in the standard library: - -.. list-table:: - :header-rows: 1 - - * - importlib_metadata - - stdlib - * - 4.4 - - 3.10 - * - 1.4 - - 3.8 +As of Python 3.8, this functionality has been added to the +`Python standard library +`_. +This package supplies backports of that functionality including +improvements added to subsequent Python versions. Usage diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD new file mode 100644 index 000000000000..89bbf7e38660 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD @@ -0,0 +1,7 @@ +importlib_metadata/__init__.py,sha256=QM4Oo096u6JYeokkDUwHgazI_h3o0w9tISPjHtVko_U,19266 +importlib_metadata/_compat.py,sha256=OS4joET_vaQClxhumw0NWYdS5N3FX1Ii895aZXLpQaA,2028 +importlib_metadata-3.1.1.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 +importlib_metadata-3.1.1.dist-info/METADATA,sha256=rdblRVlpAdjDcYkqWhn2yVNwrpBqpamdKvxrgA6EWE0,3442 +importlib_metadata-3.1.1.dist-info/WHEEL,sha256=gm79cMopkncyn0iSnI0vQNiDJ8t9on0H4_iz-CrpXMk,92 +importlib_metadata-3.1.1.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata-3.1.1.dist-info/RECORD,, diff --git a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL similarity index 65% rename from third_party/python/taskcluster/taskcluster-6.0.0.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL index 4eeaea1f7335..0863016bc236 100644 --- a/third_party/python/taskcluster/taskcluster-6.0.0.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.32.3) +Generator: bdist_wheel (0.36.0) Root-Is-Purelib: true Tag: py3-none-any diff --git 
a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt new file mode 100644 index 000000000000..bbb07547a19c --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py similarity index 52% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/__init__.py rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py index 1705129d0aad..eec91953670c 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/__init__.py +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py @@ -1,3 +1,4 @@ +import io import os import re import abc @@ -7,63 +8,44 @@ import zipp import email import pathlib import operator -import platform -import textwrap -import warnings import functools import itertools import posixpath import collections -from . import _adapters, _meta -from ._collections import FreezableDefaultDict, Pair from ._compat import ( NullFinder, PyPy_repr, install, ) -from ._functools import method_cache -from ._itertools import unique_everseen -from ._meta import PackageMetadata, SimplePath +from configparser import ConfigParser from contextlib import suppress from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap -from typing import List, Mapping, Optional, Union __all__ = [ 'Distribution', 'DistributionFinder', - 'PackageMetadata', 'PackageNotFoundError', 'distribution', 'distributions', 'entry_points', 'files', 'metadata', - 'packages_distributions', 'requires', 'version', ] -def _pypy_partial(val): - """ - Adjust for variable stacklevel on partial under PyPy. - - Workaround for #327. - """ - is_pypy = platform.python_implementation() == 'PyPy' - return val + is_pypy - - class PackageNotFoundError(ModuleNotFoundError): """The package was not found.""" def __str__(self): - return f"No package metadata was found for {self.name}" + tmpl = "No package metadata was found for {self.name}" + return tmpl.format(**locals()) @property def name(self): @@ -71,71 +53,6 @@ class PackageNotFoundError(ModuleNotFoundError): return name -class Sectioned: - """ - A simple entry point config parser for performance - - >>> for item in Sectioned.read(Sectioned._sample): - ... 
print(item) - Pair(name='sec1', value='# comments ignored') - Pair(name='sec1', value='a = 1') - Pair(name='sec1', value='b = 2') - Pair(name='sec2', value='a = 2') - - >>> res = Sectioned.section_pairs(Sectioned._sample) - >>> item = next(res) - >>> item.name - 'sec1' - >>> item.value - Pair(name='a', value='1') - >>> item = next(res) - >>> item.value - Pair(name='b', value='2') - >>> item = next(res) - >>> item.name - 'sec2' - >>> item.value - Pair(name='a', value='2') - >>> list(res) - [] - """ - - _sample = textwrap.dedent( - """ - [sec1] - # comments ignored - a = 1 - b = 2 - - [sec2] - a = 2 - """ - ).lstrip() - - @classmethod - def section_pairs(cls, text): - return ( - section._replace(value=Pair.parse(section.value)) - for section in cls.read(text, filter_=cls.valid) - if section.name is not None - ) - - @staticmethod - def read(text, filter_=None): - lines = filter(filter_, map(str.strip, text.splitlines())) - name = None - for value in lines: - section_match = value.startswith('[') and value.endswith(']') - if section_match: - name = value.strip('[]') - continue - yield Pair(name, value) - - @staticmethod - def valid(line): - return line and not line.startswith('#') - - class EntryPoint( PyPy_repr, collections.namedtuple('EntryPointBase', 'name value group') ): @@ -167,8 +84,6 @@ class EntryPoint( following the attr, and following any extras. """ - dist: Optional['Distribution'] = None - def load(self): """Load the entry point from its definition. If only a module is indicated by the value, return that module. Otherwise, @@ -194,19 +109,30 @@ class EntryPoint( match = self.pattern.match(self.value) return list(re.finditer(r'\w+', match.group('extras') or '')) - def _for(self, dist): - self.dist = dist - return self + @classmethod + def _from_config(cls, config): + return [ + cls(name, value, group) + for group in config.sections() + for name, value in config.items(group) + ] + + @classmethod + def _from_text(cls, text): + config = ConfigParser(delimiters='=') + # case sensitive: https://stackoverflow.com/q/1611799/812183 + config.optionxform = str + try: + config.read_string(text) + except AttributeError: # pragma: nocover + # Python 2 has no read_string + config.readfp(io.StringIO(text)) + return EntryPoint._from_config(config) def __iter__(self): """ - Supply iter so one may construct dicts of EntryPoints by name. + Supply iter so one may construct dicts of EntryPoints easily. """ - msg = ( - "Construction of dict of EntryPoints is deprecated in " - "favor of EntryPoints." - ) - warnings.warn(msg, DeprecationWarning) return iter((self.name, self)) def __reduce__(self): @@ -215,263 +141,6 @@ class EntryPoint( (self.name, self.value, self.group), ) - def matches(self, **params): - attrs = (getattr(self, param) for param in params) - return all(map(operator.eq, params.values(), attrs)) - - -class DeprecatedList(list): - """ - Allow an otherwise immutable object to implement mutability - for compatibility. - - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ - - _warn = functools.partial( - warnings.warn, - "EntryPoints list interface is deprecated. 
Cast to list if needed.", - DeprecationWarning, - stacklevel=_pypy_partial(2), - ) - - def __setitem__(self, *args, **kwargs): - self._warn() - return super().__setitem__(*args, **kwargs) - - def __delitem__(self, *args, **kwargs): - self._warn() - return super().__delitem__(*args, **kwargs) - - def append(self, *args, **kwargs): - self._warn() - return super().append(*args, **kwargs) - - def reverse(self, *args, **kwargs): - self._warn() - return super().reverse(*args, **kwargs) - - def extend(self, *args, **kwargs): - self._warn() - return super().extend(*args, **kwargs) - - def pop(self, *args, **kwargs): - self._warn() - return super().pop(*args, **kwargs) - - def remove(self, *args, **kwargs): - self._warn() - return super().remove(*args, **kwargs) - - def __iadd__(self, *args, **kwargs): - self._warn() - return super().__iadd__(*args, **kwargs) - - def __add__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - return self.__class__(tuple(self) + other) - - def insert(self, *args, **kwargs): - self._warn() - return super().insert(*args, **kwargs) - - def sort(self, *args, **kwargs): - self._warn() - return super().sort(*args, **kwargs) - - def __eq__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - - return tuple(self).__eq__(other) - - -class EntryPoints(DeprecatedList): - """ - An immutable collection of selectable EntryPoint objects. - """ - - __slots__ = () - - def __getitem__(self, name): # -> EntryPoint: - """ - Get the EntryPoint in self matching name. - """ - if isinstance(name, int): - warnings.warn( - "Accessing entry points by index is deprecated. " - "Cast to tuple if needed.", - DeprecationWarning, - stacklevel=2, - ) - return super().__getitem__(name) - try: - return next(iter(self.select(name=name))) - except StopIteration: - raise KeyError(name) - - def select(self, **params): - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - return EntryPoints(ep for ep in self if ep.matches(**params)) - - @property - def names(self): - """ - Return the set of all names of all entry points. - """ - return set(ep.name for ep in self) - - @property - def groups(self): - """ - Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() - """ - return set(ep.group for ep in self) - - @classmethod - def _from_text_for(cls, text, dist): - return cls(ep._for(dist) for ep in cls._from_text(text)) - - @classmethod - def _from_text(cls, text): - return itertools.starmap(EntryPoint, cls._parse_groups(text or '')) - - @staticmethod - def _parse_groups(text): - return ( - (item.value.name, item.value.value, item.name) - for item in Sectioned.section_pairs(text) - ) - - -class Deprecated: - """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. - - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ - - _warn = functools.partial( - warnings.warn, - "SelectableGroups dict interface is deprecated. 
Use select.", - DeprecationWarning, - stacklevel=_pypy_partial(2), - ) - - def __getitem__(self, name): - self._warn() - return super().__getitem__(name) - - def get(self, name, default=None): - self._warn() - return super().get(name, default) - - def __iter__(self): - self._warn() - return super().__iter__() - - def __contains__(self, *args): - self._warn() - return super().__contains__(*args) - - def keys(self): - self._warn() - return super().keys() - - def values(self): - self._warn() - return super().values() - - -class SelectableGroups(Deprecated, dict): - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - - @classmethod - def load(cls, eps): - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return cls((group, EntryPoints(eps)) for group, eps in grouped) - - @property - def _all(self): - """ - Reconstruct a list of all entrypoints from the groups. - """ - groups = super(Deprecated, self).values() - return EntryPoints(itertools.chain.from_iterable(groups)) - - @property - def groups(self): - return self._all.groups - - @property - def names(self): - """ - for coverage: - >>> SelectableGroups().names - set() - """ - return self._all.names - - def select(self, **params): - if not params: - return self - return self._all.select(**params) - class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" @@ -494,7 +163,7 @@ class FileHash: self.mode, _, self.value = spec.partition('=') def __repr__(self): - return f'' + return ''.format(self.mode, self.value) class Distribution: @@ -581,7 +250,7 @@ class Distribution: return PathDistribution(zipp.Path(meta.build_as_zip(builder))) @property - def metadata(self) -> _meta.PackageMetadata: + def metadata(self): """Return the parsed metadata for this Distribution. The returned object will have keys that name the various bits of @@ -595,17 +264,7 @@ class Distribution: # (which points to the egg-info file) attribute unchanged. 
or self.read_text('') ) - return _adapters.Message(email.message_from_string(text)) - - @property - def name(self): - """Return the 'Name' metadata for the distribution package.""" - return self.metadata['Name'] - - @property - def _normalized_name(self): - """Return a normalized version of the name.""" - return Prepared.normalize(self.name) + return email.message_from_string(text) @property def version(self): @@ -614,7 +273,7 @@ class Distribution: @property def entry_points(self): - return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) + return EntryPoint._from_text(self.read_text('entry_points.txt')) @property def files(self): @@ -668,7 +327,24 @@ class Distribution: @classmethod def _deps_from_requires_text(cls, source): - return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) + section_pairs = cls._read_sections(source.splitlines()) + sections = { + section: list(map(operator.itemgetter('line'), results)) + for section, results in itertools.groupby( + section_pairs, operator.itemgetter('section') + ) + } + return cls._convert_egg_info_reqs_to_simple_reqs(sections) + + @staticmethod + def _read_sections(lines): + section = None + for line in filter(None, lines): + section_match = re.match(r'\[(.*)\]$', line) + if section_match: + section = section_match.group(1) + continue + yield locals() @staticmethod def _convert_egg_info_reqs_to_simple_reqs(sections): @@ -683,18 +359,19 @@ class Distribution: """ def make_condition(name): - return name and f'extra == "{name}"' + return name and 'extra == "{name}"'.format(name=name) def parse_condition(section): section = section or '' extra, sep, markers = section.partition(':') if extra and markers: - markers = f'({markers})' + markers = '({markers})'.format(markers=markers) conditions = list(filter(None, [markers, make_condition(extra)])) return '; ' + ' and '.join(conditions) if conditions else '' - for section in sections: - yield section.value + parse_condition(section.name) + for section, deps in sections.items(): + for dep in deps: + yield dep + parse_condition(section) class DistributionFinder(MetaPathFinder): @@ -726,11 +403,10 @@ class DistributionFinder(MetaPathFinder): @property def path(self): """ - The sequence of directory path that a distribution finder - should search. + The path that a distribution finder should search. - Typically refers to Python installed package paths such as - "site-packages" directories and defaults to ``sys.path``. + Typically refers to Python package paths and defaults + to ``sys.path``. """ return vars(self).get('path', sys.path) @@ -751,12 +427,9 @@ class FastPath: children. 
""" - @functools.lru_cache() # type: ignore - def __new__(cls, root): - return super().__new__(cls) - def __init__(self, root): self.root = str(root) + self.base = os.path.basename(self.root).lower() def joinpath(self, child): return pathlib.Path(self.root, child) @@ -776,53 +449,11 @@ class FastPath: return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) def search(self, name): - return self.lookup(self.mtime).search(name) - - @property - def mtime(self): - with suppress(OSError): - return os.stat(self.root).st_mtime - self.lookup.cache_clear() - - @method_cache - def lookup(self, mtime): - return Lookup(self) - - -class Lookup: - def __init__(self, path: FastPath): - base = os.path.basename(path.root).lower() - base_is_egg = base.endswith(".egg") - self.infos = FreezableDefaultDict(list) - self.eggs = FreezableDefaultDict(list) - - for child in path.children(): - low = child.lower() - if low.endswith((".dist-info", ".egg-info")): - # rpartition is faster than splitext and suitable for this purpose. - name = low.rpartition(".")[0].partition("-")[0] - normalized = Prepared.normalize(name) - self.infos[normalized].append(path.joinpath(child)) - elif base_is_egg and low == "egg-info": - name = base.rpartition(".")[0].partition("-")[0] - legacy_normalized = Prepared.legacy_normalize(name) - self.eggs[legacy_normalized].append(path.joinpath(child)) - - self.infos.freeze() - self.eggs.freeze() - - def search(self, prepared): - infos = ( - self.infos[prepared.normalized] - if prepared - else itertools.chain.from_iterable(self.infos.values()) + return ( + self.joinpath(child) + for child in self.children() + if name.matches(child, self.base) ) - eggs = ( - self.eggs[prepared.legacy_normalized] - if prepared - else itertools.chain.from_iterable(self.eggs.values()) - ) - return itertools.chain(infos, eggs) class Prepared: @@ -831,14 +462,15 @@ class Prepared: """ normalized = None - legacy_normalized = None + suffixes = '.dist-info', '.egg-info' + exact_matches = [''][:0] def __init__(self, name): self.name = name if name is None: return self.normalized = self.normalize(name) - self.legacy_normalized = self.legacy_normalize(name) + self.exact_matches = [self.normalized + suffix for suffix in self.suffixes] @staticmethod def normalize(name): @@ -855,8 +487,28 @@ class Prepared: """ return name.lower().replace('-', '_') - def __bool__(self): - return bool(self.name) + def matches(self, cand, base): + low = cand.lower() + pre, ext = os.path.splitext(low) + name, sep, rest = pre.partition('-') + return ( + low in self.exact_matches + or ext in self.suffixes + and (not self.normalized or name.replace('.', '_') == self.normalized) + # legacy case: + or self.is_egg(base) + and low == 'egg-info' + ) + + def is_egg(self, base): + normalized = self.legacy_normalize(self.name or '') + prefix = normalized + '-' if normalized else '' + versionless_egg_name = normalized + '.egg' if self.name else '' + return ( + base == versionless_egg_name + or base.startswith(prefix) + and base.endswith('.egg') + ) @install @@ -882,20 +534,17 @@ class MetadataPathFinder(NullFinder, DistributionFinder): @classmethod def _search_paths(cls, name, paths): """Find metadata directories in paths heuristically.""" - prepared = Prepared(name) return itertools.chain.from_iterable( - path.search(prepared) for path in map(FastPath, paths) + path.search(Prepared(name)) for path in map(FastPath, paths) ) - def invalidate_caches(cls): - FastPath.__new__.cache_clear() - class PathDistribution(Distribution): - def 
__init__(self, path: SimplePath): - """Construct a distribution. + def __init__(self, path): + """Construct a distribution from a path to the metadata directory. - :param path: SimplePath indicating the metadata directory. + :param path: A pathlib.Path or similar object supporting + .joinpath(), __div__, .parent, and .read_text(). """ self._path = path @@ -914,22 +563,6 @@ class PathDistribution(Distribution): def locate_file(self, path): return self._path.parent / path - @property - def _normalized_name(self): - """ - Performance optimization: where possible, resolve the - normalized name from the file system path. - """ - stem = os.path.basename(str(self._path)) - return self._name_from_stem(stem) or super()._normalized_name - - def _name_from_stem(self, stem): - name, ext = os.path.splitext(stem) - if ext not in ('.dist-info', '.egg-info'): - return - name, sep, rest = stem.partition('-') - return name - def distribution(distribution_name): """Get the ``Distribution`` instance for the named package. @@ -948,11 +581,11 @@ def distributions(**kwargs): return Distribution.discover(**kwargs) -def metadata(distribution_name) -> _meta.PackageMetadata: +def metadata(distribution_name): """Get the metadata for the named package. :param distribution_name: The name of the distribution package to query. - :return: A PackageMetadata containing the parsed metadata. + :return: An email.Message containing the parsed metadata. """ return Distribution.from_name(distribution_name).metadata @@ -967,29 +600,16 @@ def version(distribution_name): return distribution(distribution_name).version -def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: +def entry_points(): """Return EntryPoint objects for all installed packages. - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. + :return: EntryPoint objects for all installed packages. """ - norm_name = operator.attrgetter('_normalized_name') - unique = functools.partial(unique_everseen, key=norm_name) - eps = itertools.chain.from_iterable( - dist.entry_points for dist in unique(distributions()) - ) - return SelectableGroups.load(eps).select(**params) + eps = itertools.chain.from_iterable(dist.entry_points for dist in distributions()) + by_group = operator.attrgetter('group') + ordered = sorted(eps, key=by_group) + grouped = itertools.groupby(ordered, by_group) + return {group: tuple(eps) for group, eps in grouped} def files(distribution_name): @@ -1006,23 +626,6 @@ def requires(distribution_name): Return a list of requirements for the named package. :return: An iterator of requirements, suitable for - packaging.requirement.Requirement. + packaging.requirement.Requirement. """ return distribution(distribution_name).requires - - -def packages_distributions() -> Mapping[str, List[str]]: - """ - Return a mapping of top-level packages to their - distributions. 
- - >>> import collections.abc - >>> pkgs = packages_distributions() - >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) - True - """ - pkg_to_dist = collections.defaultdict(list) - for dist in distributions(): - for pkg in (dist.read_text('top_level.txt') or '').split(): - pkg_to_dist[pkg].append(dist.metadata['Name']) - return dict(pkg_to_dist) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py similarity index 79% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_compat.py rename to third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py index 043ece028e81..c1362d53604d 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_compat.py +++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py @@ -1,18 +1,7 @@ import sys -__all__ = ['install', 'NullFinder', 'PyPy_repr', 'Protocol'] - - -try: - from typing import Protocol -except ImportError: # pragma: no cover - """ - pytest-mypy complains here because: - error: Incompatible import of "Protocol" (imported name has type - "typing_extensions._SpecialForm", local name has type "typing._SpecialForm") - """ - from typing_extensions import Protocol # type: ignore +__all__ = ['install', 'NullFinder', 'PyPy_repr'] def install(cls): @@ -76,10 +65,10 @@ class PyPy_repr: def __compat_repr__(self): # pragma: nocover def make_param(name): value = getattr(self, name) - return f'{name}={value!r}' + return '{name}={value!r}'.format(**locals()) params = ', '.join(map(make_param, self._fields)) - return f'EntryPoint({params})' + return 'EntryPoint({params})'.format(**locals()) if affected: # pragma: nocover __repr__ = __compat_repr__ diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/RECORD deleted file mode 100644 index f5ee8ae54134..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -importlib_metadata/__init__.py,sha256=a6QwI2vH_UaSy6xt2jNWk5t8_R8ZneqAICL4hgIgTVw,29597 -importlib_metadata/_adapters.py,sha256=B6fCi5-8mLVDFUZj3krI5nAo-mKp1dH_qIavyIyFrJs,1862 -importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 -importlib_metadata/_compat.py,sha256=2wonzhS4HQ0rxgJ3GNQxYGFOv0kOXfriY8BlTUo9_Ao,2348 -importlib_metadata/_functools.py,sha256=mJCTrU4JMrCndpDoqN3zbI6P9tAKU116LopceWedi1c,2501 -importlib_metadata/_itertools.py,sha256=5TUj_APJHq3pvjn04hnP2oYBebP2No7HmNH_hkOGwLQ,607 -importlib_metadata/_meta.py,sha256=ga8kgPmxAbf6HyJWOJzTD0uMkDo_fXwNPuwdW1HevpQ,1150 -importlib_metadata/_text.py,sha256=8OAQl69ZcDQ-4cTx-RVvHw9DZh6dv3KsKJM22bfDKfk,2198 -importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_metadata-4.6.2.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 -importlib_metadata-4.6.2.dist-info/METADATA,sha256=Xx4Cj_QH55hCsEXfdPzkkt9J9GtNaSFNcj4OMw8dlTY,4066 
-importlib_metadata-4.6.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 -importlib_metadata-4.6.2.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 -importlib_metadata-4.6.2.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_adapters.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_adapters.py deleted file mode 100644 index aa460d3eda50..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_adapters.py +++ /dev/null @@ -1,68 +0,0 @@ -import re -import textwrap -import email.message - -from ._text import FoldedCase - - -class Message(email.message.Message): - multiple_use_keys = set( - map( - FoldedCase, - [ - 'Classifier', - 'Obsoletes-Dist', - 'Platform', - 'Project-URL', - 'Provides-Dist', - 'Provides-Extra', - 'Requires-Dist', - 'Requires-External', - 'Supported-Platform', - 'Dynamic', - ], - ) - ) - """ - Keys that may be indicated multiple times per PEP 566. - """ - - def __new__(cls, orig: email.message.Message): - res = super().__new__(cls) - vars(res).update(vars(orig)) - return res - - def __init__(self, *args, **kwargs): - self._headers = self._repair_headers() - - # suppress spurious error from mypy - def __iter__(self): - return super().__iter__() - - def _repair_headers(self): - def redent(value): - "Correct for RFC822 indentation" - if not value or '\n' not in value: - return value - return textwrap.dedent(' ' * 8 + value) - - headers = [(key, redent(value)) for key, value in vars(self)['_headers']] - if self._payload: - headers.append(('Description', self.get_payload())) - return headers - - @property - def json(self): - """ - Convert PackageMetadata to a JSON-compatible format - per PEP 0566. - """ - - def transform(key): - value = self.get_all(key) if key in self.multiple_use_keys else self[key] - if key == 'Keywords': - value = re.split(r'\s+', value) - tk = key.lower().replace('-', '_') - return tk, value - - return dict(map(transform, map(FoldedCase, self))) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_collections.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_collections.py deleted file mode 100644 index cf0954e1a305..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_collections.py +++ /dev/null @@ -1,30 +0,0 @@ -import collections - - -# from jaraco.collections 3.3 -class FreezableDefaultDict(collections.defaultdict): - """ - Often it is desirable to prevent the mutation of - a default dict after its initial construction, such - as to prevent mutation during iteration. 
- - >>> dd = FreezableDefaultDict(list) - >>> dd[0].append('1') - >>> dd.freeze() - >>> dd[1] - [] - >>> len(dd) - 1 - """ - - def __missing__(self, key): - return getattr(self, '_frozen', super().__missing__)(key) - - def freeze(self): - self._frozen = lambda key: self.default_factory() - - -class Pair(collections.namedtuple('Pair', 'name value')): - @classmethod - def parse(cls, text): - return cls(*map(str.strip, text.split("=", 1))) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_functools.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_functools.py deleted file mode 100644 index 73f50d00bc04..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_functools.py +++ /dev/null @@ -1,85 +0,0 @@ -import types -import functools - - -# from jaraco.functools 3.3 -def method_cache(method, cache_wrapper=None): - """ - Wrap lru_cache to support storing the cache data in the object instances. - - Abstracts the common paradigm where the method explicitly saves an - underscore-prefixed protected property on first call and returns that - subsequently. - - >>> class MyClass: - ... calls = 0 - ... - ... @method_cache - ... def method(self, value): - ... self.calls += 1 - ... return value - - >>> a = MyClass() - >>> a.method(3) - 3 - >>> for x in range(75): - ... res = a.method(x) - >>> a.calls - 75 - - Note that the apparent behavior will be exactly like that of lru_cache - except that the cache is stored on each instance, so values in one - instance will not flush values from another, and when an instance is - deleted, so are the cached values for that instance. - - >>> b = MyClass() - >>> for x in range(35): - ... res = b.method(x) - >>> b.calls - 35 - >>> a.method(0) - 0 - >>> a.calls - 75 - - Note that if method had been decorated with ``functools.lru_cache()``, - a.calls would have been 76 (due to the cached value of 0 having been - flushed by the 'b' instance). - - Clear the cache with ``.cache_clear()`` - - >>> a.method.cache_clear() - - Same for a method that hasn't yet been called. - - >>> c = MyClass() - >>> c.method.cache_clear() - - Another cache wrapper may be supplied: - - >>> cache = functools.lru_cache(maxsize=2) - >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) - >>> a = MyClass() - >>> a.method2() - 3 - - Caution - do not subsequently wrap the method with another decorator, such - as ``@property``, which changes the semantics of the function. - - See also - http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ - for another implementation and additional justification. - """ - cache_wrapper = cache_wrapper or functools.lru_cache() - - def wrapper(self, *args, **kwargs): - # it's the first call, replace the method with a cached, bound method - bound_method = types.MethodType(method, self) - cached_method = cache_wrapper(bound_method) - setattr(self, method.__name__, cached_method) - return cached_method(*args, **kwargs) - - # Support cache clear even before cache has been created. 
- wrapper.cache_clear = lambda: None - - return wrapper diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_itertools.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_itertools.py deleted file mode 100644 index dd45f2f09663..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_itertools.py +++ /dev/null @@ -1,19 +0,0 @@ -from itertools import filterfalse - - -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_meta.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_meta.py deleted file mode 100644 index dd68c429703c..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_meta.py +++ /dev/null @@ -1,48 +0,0 @@ -from ._compat import Protocol -from typing import Any, Dict, Iterator, List, TypeVar, Union - - -_T = TypeVar("_T") - - -class PackageMetadata(Protocol): - def __len__(self) -> int: - ... # pragma: no cover - - def __contains__(self, item: str) -> bool: - ... # pragma: no cover - - def __getitem__(self, key: str) -> str: - ... # pragma: no cover - - def __iter__(self) -> Iterator[str]: - ... # pragma: no cover - - def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: - """ - Return all values associated with a possibly multi-valued key. - """ - - @property - def json(self) -> Dict[str, Union[str, List[str]]]: - """ - A JSON-compatible form of the metadata. - """ - - -class SimplePath(Protocol): - """ - A minimal subset of pathlib.Path required by PathDistribution. - """ - - def joinpath(self) -> 'SimplePath': - ... # pragma: no cover - - def __div__(self) -> 'SimplePath': - ... # pragma: no cover - - def parent(self) -> 'SimplePath': - ... # pragma: no cover - - def read_text(self) -> str: - ... # pragma: no cover diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_text.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_text.py deleted file mode 100644 index 766979d93c16..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_text.py +++ /dev/null @@ -1,99 +0,0 @@ -import re - -from ._functools import method_cache - - -# from jaraco.text 3.5 -class FoldedCase(str): - """ - A case insensitive string class; behaves just like str - except compares equal when the only variation is case. - - >>> s = FoldedCase('hello world') - - >>> s == 'Hello World' - True - - >>> 'Hello World' == s - True - - >>> s != 'Hello World' - False - - >>> s.index('O') - 4 - - >>> s.split('O') - ['hell', ' w', 'rld'] - - >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) - ['alpha', 'Beta', 'GAMMA'] - - Sequence membership is straightforward. 
- - >>> "Hello World" in [s] - True - >>> s in ["Hello World"] - True - - You may test for set inclusion, but candidate and elements - must both be folded. - - >>> FoldedCase("Hello World") in {s} - True - >>> s in {FoldedCase("Hello World")} - True - - String inclusion works as long as the FoldedCase object - is on the right. - - >>> "hello" in FoldedCase("Hello World") - True - - But not if the FoldedCase object is on the left: - - >>> FoldedCase('hello') in 'Hello World' - False - - In that case, use in_: - - >>> FoldedCase('hello').in_('Hello World') - True - - >>> FoldedCase('hello') > FoldedCase('Hello') - False - """ - - def __lt__(self, other): - return self.lower() < other.lower() - - def __gt__(self, other): - return self.lower() > other.lower() - - def __eq__(self, other): - return self.lower() == other.lower() - - def __ne__(self, other): - return self.lower() != other.lower() - - def __hash__(self): - return hash(self.lower()) - - def __contains__(self, other): - return super(FoldedCase, self).lower().__contains__(other.lower()) - - def in_(self, other): - "Does self appear in other?" - return self in FoldedCase(other) - - # cache lower since it's likely to be called frequently. - @method_cache - def lower(self): - return super(FoldedCase, self).lower() - - def index(self, sub): - return self.lower().index(sub.lower()) - - def split(self, splitter=' ', maxsplit=0): - pattern = re.compile(re.escape(splitter), re.I) - return pattern.split(self, maxsplit) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE similarity index 91% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE index 378b991a4d94..7e4791068d58 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/LICENSE +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE @@ -1,4 +1,4 @@ -Copyright 2017-2019 Brett Cannon, Barry Warsaw +Copyright 2017-2018 Brett Cannon, Barry Warsaw Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
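The removed `method_cache` helper above implements per-instance memoization: the cache is stored on each instance, so one instance's cached values never evict another's and they go away with the instance. A minimal stdlib-only sketch of that same idea (the `Distribution` class, its `read_text` method and the file names are illustrative placeholders, not part of the vendored API):

    import functools

    class Distribution:
        def __init__(self, path):
            self.path = path
            # Store a cached, bound method on the instance itself, so each
            # Distribution keeps its own cache (the core idea behind method_cache).
            self.read_text = functools.lru_cache(maxsize=None)(self._read_text)

        def _read_text(self, filename):
            print("reading %s from %s" % (filename, self.path))
            return "..."

    d1, d2 = Distribution("a.dist-info"), Distribution("b.dist-info")
    d1.read_text("METADATA")   # computed and cached on d1
    d1.read_text("METADATA")   # served from d1's cache, no recomputation
    d2.read_text("METADATA")   # d2 has its own, independent cache

Unlike the vendored decorator, this sketch only covers the per-instance storage idea; `method_cache` additionally wires the caching in lazily on first call.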
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA new file mode 100644 index 000000000000..8eb23366fa38 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA @@ -0,0 +1,49 @@ +Metadata-Version: 2.1 +Name: importlib-resources +Version: 1.0.2 +Summary: Read resources from Python packages +Home-page: http://importlib-resources.readthedocs.io/ +Author: Barry Warsaw +Author-email: barry@python.org +License: Apache Software License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Topic :: Software Development :: Libraries +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Requires-Python: >=2.7,!=3.0,!=3.1,!=3.2,!=3.3 +Requires-Dist: pathlib2; python_version < "3" +Requires-Dist: typing; python_version < "3.5" + +========================= + ``importlib_resources`` +========================= + +``importlib_resources`` is a backport of Python 3.7's standard library +`importlib.resources +`_ +module for Python 2.7, and 3.4 through 3.6. Users of Python 3.7 and beyond +should use the standard library module, since for these versions, +``importlib_resources`` just delegates to that module. + +The key goal of this module is to replace parts of `pkg_resources +`_ with a +solution in Python's stdlib that relies on well-defined APIs. This makes +reading resources included in packages easier, with more stable and consistent +semantics. + +Note that ``pip 10`` is required if you are going to ``pip install +importlib_resources``. 
+ + +Project details +=============== + + * Project home: https://gitlab.com/python-devs/importlib_resources + * Report bugs at: https://gitlab.com/python-devs/importlib_resources/issues + * Code hosting: https://gitlab.com/python-devs/importlib_resources.git + * Documentation: http://importlib_resources.readthedocs.io/ + + diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD new file mode 100644 index 000000000000..b728f0e05a8d --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD @@ -0,0 +1,39 @@ +importlib_resources/__init__.py,sha256=rzQAetwEwMAwz3RonnegirJDoyexftQNVgVAtPqs91k,1064 +importlib_resources/_compat.py,sha256=ldJ5ebXghEZdDKuFIbigsNuSp4VrBi8a6XAG2tXtebc,581 +importlib_resources/_py2.py,sha256=EypZLeKb03aScgvtpzJxcr-E6CjL8DJLWTls8ql3QVY,11601 +importlib_resources/_py3.py,sha256=hUfpyjcsu13D57VyJKSRzYnA4RTp7FaQYwPous33yEk,12882 +importlib_resources/abc.py,sha256=U9Q4qZImO0rpCF9aoV1a5tS1IrXDhrAoT5PUFReSZs0,1946 +importlib_resources/version.txt,sha256=n9KGQtOsoZHlx_wjg8_W-rsqrIdD8Cnau4mJrFhOMbw,6 +importlib_resources/docs/changelog.rst,sha256=uWSJrcIlTNTj2tGRpGLzeaz9eLcM3pu_6yVqnQH_F94,2020 +importlib_resources/docs/conf.py,sha256=x7IPypqIitt3ztWBP4KKAxDHMfDI6eEVSD1K-fs000w,5557 +importlib_resources/docs/index.rst,sha256=ZgWQVxUPNyYZYUS5pRZXboxfc1-S0z8NBhcCQz0_YTQ,2138 +importlib_resources/docs/migration.rst,sha256=RdJE8S_bh50d6-63UrjrKuojcfYxv2gx3qcyHSy42DA,6329 +importlib_resources/docs/using.rst,sha256=epgk0GWhEwKGWCzL3DvU3GnGalp1jwxiU-XZL5eaC5w,8586 +importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/test_open.py,sha256=yDXmTGXQspByj6WU0prnoVwab1yWWEA3fwz_XIx7TQU,2288 +importlib_resources/tests/test_path.py,sha256=yVYMwuECJiivtubCGnYA0-6e-LSpbnTKjcBHuKk-oMc,1178 +importlib_resources/tests/test_read.py,sha256=DpA7tzxSQlU0_YQuWibB3E5PDL9fQUdzeKoEUGnAx78,2046 +importlib_resources/tests/test_resource.py,sha256=X77DzU2BRoM6d59iEh74zDHHw3pKOBGLCg3lP3dH4BI,6467 +importlib_resources/tests/util.py,sha256=f0RZU-RkEkybJjXRd7C5HcWMsoLFRWJL4FIUF1CJ2wo,6980 +importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 +importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 
+importlib_resources/tests/data03/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data03/namespace/resource1.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=gAC1vleFnNtdAHuNyYQ30gvIZ5itNRfZtaF0hxGHAi4,876 +importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=kL-RiB7ndv8FHBCJde6oj34_C90gtrSCYgYk98osm6M,698 +importlib_resources-1.0.2.dist-info/LICENSE,sha256=xS4YxCplVSZiTNBwkotq9YkkHJ8nlkctJpFZvlLA9NM,568 +importlib_resources-1.0.2.dist-info/METADATA,sha256=WiWlAvBr3XA3pXUg2NJ08qHO-NM93m6v1aXlega5BMk,1881 +importlib_resources-1.0.2.dist-info/WHEEL,sha256=CihQvCnsGZQBGAHLEUMf0IdA4fRduS_NBUTMgCTtvPM,110 +importlib_resources-1.0.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 +importlib_resources-1.0.2.dist-info/RECORD,, diff --git a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL similarity index 70% rename from third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL index c8240f03e87f..dea0e20ccdfe 100644 --- a/third_party/python/requests_unixsocket/requests_unixsocket-0.2.0.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) +Generator: bdist_wheel (0.32.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/top_level.txt rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py new file mode 100644 index 000000000000..fab437a4ad93 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py @@ -0,0 +1,36 @@ +"""Read resources contained within a package.""" + +import sys + + +__all__ = [ + 'contents', + 'is_resource', + 'open_binary', + 'open_text', + 'path', + 'read_binary', + 'read_text', + ] + + +# Use the Python 3.7 stdlib implementation if available. 
+if sys.version_info >= (3, 7): + from importlib.resources import ( + Package, Resource, contents, is_resource, open_binary, open_text, path, + read_binary, read_text) + from importlib.abc import ResourceReader + __all__.extend(['Package', 'Resource', 'ResourceReader']) +elif sys.version_info >= (3,): + from importlib_resources._py3 import ( + Package, Resource, contents, is_resource, open_binary, open_text, path, + read_binary, read_text) + from importlib_resources.abc import ResourceReader + __all__.extend(['Package', 'Resource', 'ResourceReader']) +else: + from importlib_resources._py2 import ( + contents, is_resource, open_binary, open_text, path, read_binary, + read_text) + + +__version__ = read_text('importlib_resources', 'version.txt').strip() diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py new file mode 100644 index 000000000000..28d61276e072 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import + +# flake8: noqa + +try: + from pathlib import Path, PurePath +except ImportError: + from pathlib2 import Path, PurePath # type: ignore + + +try: + from abc import ABC # type: ignore +except ImportError: + from abc import ABCMeta + + class ABC(object): # type: ignore + __metaclass__ = ABCMeta + + +try: + FileNotFoundError = FileNotFoundError # type: ignore +except NameError: + FileNotFoundError = OSError diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py new file mode 100644 index 000000000000..376f0e3813e1 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py @@ -0,0 +1,270 @@ +import os +import errno +import tempfile + +from ._compat import FileNotFoundError +from contextlib import contextmanager +from importlib import import_module +from io import BytesIO, TextIOWrapper, open as io_open +from pathlib2 import Path +from zipfile import ZipFile + + +def _get_package(package): + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. + """ + if isinstance(package, basestring): # noqa: F821 + module = import_module(package) + else: + module = package + if not hasattr(module, '__path__'): + raise TypeError("{!r} is not a package".format(package)) + return module + + +def _normalize_path(path): + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. + """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError("{!r} must be only a file name".format(path)) + else: + return file_name + + +def open_binary(package, resource): + """Return a file-like object opened for binary reading of the resource.""" + resource = _normalize_path(resource) + package = _get_package(package) + # Using pathlib doesn't work well here due to the lack of 'strict' argument + # for pathlib.Path.resolve() prior to Python 3.6. 
+ package_path = os.path.dirname(package.__file__) + relative_path = os.path.join(package_path, resource) + full_path = os.path.abspath(relative_path) + try: + return io_open(full_path, 'rb') + except IOError: + # This might be a package in a zip file. zipimport provides a loader + # with a functioning get_data() method, however we have to strip the + # archive (i.e. the .zip file's name) off the front of the path. This + # is because the zipimport loader in Python 2 doesn't actually follow + # PEP 302. It should allow the full path, but actually requires that + # the path be relative to the zip file. + try: + loader = package.__loader__ + full_path = relative_path[len(loader.archive)+1:] + data = loader.get_data(full_path) + except (IOError, AttributeError): + package_name = package.__name__ + message = '{!r} resource not found in {!r}'.format( + resource, package_name) + raise FileNotFoundError(message) + else: + return BytesIO(data) + + +def open_text(package, resource, encoding='utf-8', errors='strict'): + """Return a file-like object opened for text reading of the resource.""" + resource = _normalize_path(resource) + package = _get_package(package) + # Using pathlib doesn't work well here due to the lack of 'strict' argument + # for pathlib.Path.resolve() prior to Python 3.6. + package_path = os.path.dirname(package.__file__) + relative_path = os.path.join(package_path, resource) + full_path = os.path.abspath(relative_path) + try: + return io_open(full_path, mode='r', encoding=encoding, errors=errors) + except IOError: + # This might be a package in a zip file. zipimport provides a loader + # with a functioning get_data() method, however we have to strip the + # archive (i.e. the .zip file's name) off the front of the path. This + # is because the zipimport loader in Python 2 doesn't actually follow + # PEP 302. It should allow the full path, but actually requires that + # the path be relative to the zip file. + try: + loader = package.__loader__ + full_path = relative_path[len(loader.archive)+1:] + data = loader.get_data(full_path) + except (IOError, AttributeError): + package_name = package.__name__ + message = '{!r} resource not found in {!r}'.format( + resource, package_name) + raise FileNotFoundError(message) + else: + return TextIOWrapper(BytesIO(data), encoding, errors) + + +def read_binary(package, resource): + """Return the binary contents of the resource.""" + resource = _normalize_path(resource) + package = _get_package(package) + with open_binary(package, resource) as fp: + return fp.read() + + +def read_text(package, resource, encoding='utf-8', errors='strict'): + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + resource = _normalize_path(resource) + package = _get_package(package) + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +@contextmanager +def path(package, resource): + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). 
+ """ + resource = _normalize_path(resource) + package = _get_package(package) + package_directory = Path(package.__file__).parent + file_path = package_directory / resource + # If the file actually exists on the file system, just return it. + # Otherwise, it's probably in a zip file, so we need to create a temporary + # file and copy the contents into that file, hence the contextmanager to + # clean up the temp file resource. + if file_path.exists(): + yield file_path + else: + with open_binary(package, resource) as fp: + data = fp.read() + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. + fd, raw_path = tempfile.mkstemp() + try: + os.write(fd, data) + os.close(fd) + yield Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +def is_resource(package, name): + """True if name is a resource inside package. + + Directories are *not* resources. + """ + package = _get_package(package) + _normalize_path(name) + try: + package_contents = set(contents(package)) + except OSError as error: + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + # We won't hit this in the Python 2 tests, so it'll appear + # uncovered. We could mock os.listdir() to return a non-ENOENT or + # ENOTDIR, but then we'd have to depend on another external + # library since Python 2 doesn't have unittest.mock. It's not + # worth it. + raise # pragma: nocover + return False + if name not in package_contents: + return False + # Just because the given file_name lives as an entry in the package's + # contents doesn't necessarily mean it's a resource. Directories are not + # resources, so let's try to find out if it's a directory or not. + path = Path(package.__file__).parent / name + if path.is_file(): + return True + if path.is_dir(): + return False + # If it's not a file and it's not a directory, what is it? Well, this + # means the file doesn't exist on the file system, so it probably lives + # inside a zip file. We have to crack open the zip, look at its table of + # contents, and make sure that this entry doesn't have sub-entries. + archive_path = package.__loader__.archive # type: ignore + package_directory = Path(package.__file__).parent + with ZipFile(archive_path) as zf: + toc = zf.namelist() + relpath = package_directory.relative_to(archive_path) + candidate_path = relpath / name + for entry in toc: # pragma: nobranch + try: + relative_to_candidate = Path(entry).relative_to(candidate_path) + except ValueError: + # The two paths aren't relative to each other so we can ignore it. + continue + # Since directories aren't explicitly listed in the zip file, we must + # infer their 'directory-ness' by looking at the number of path + # components in the path relative to the package resource we're + # looking up. If there are zero additional parts, it's a file, i.e. a + # resource. If there are more than zero it's a directory, i.e. not a + # resource. It has to be one of these two cases. + return len(relative_to_candidate.parts) == 0 + # I think it's impossible to get here. It would mean that we are looking + # for a resource in a zip file, there's an entry matching it in the return + # value of contents(), but we never actually found it in the zip's table of + # contents. + raise AssertionError('Impossible situation') + + +def contents(package): + """Return an iterable of entries in `package`. + + Note that not all entries are resources. 
Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + package = _get_package(package) + package_directory = Path(package.__file__).parent + try: + return os.listdir(str(package_directory)) + except OSError as error: + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + # We won't hit this in the Python 2 tests, so it'll appear + # uncovered. We could mock os.listdir() to return a non-ENOENT or + # ENOTDIR, but then we'd have to depend on another external + # library since Python 2 doesn't have unittest.mock. It's not + # worth it. + raise # pragma: nocover + # The package is probably in a zip file. + archive_path = getattr(package.__loader__, 'archive', None) + if archive_path is None: + raise + relpath = package_directory.relative_to(archive_path) + with ZipFile(archive_path) as zf: + toc = zf.namelist() + subdirs_seen = set() # type: Set + subdirs_returned = [] + for filename in toc: + path = Path(filename) + # Strip off any path component parts that are in common with the + # package directory, relative to the zip archive's file system + # path. This gives us all the parts that live under the named + # package inside the zip file. If the length of these subparts is + # exactly 1, then it is situated inside the package. The resulting + # length will be 0 if it's above the package, and it will be + # greater than 1 if it lives in a subdirectory of the package + # directory. + # + # However, since directories themselves don't appear in the zip + # archive as a separate entry, we need to return the first path + # component for any case that has > 1 subparts -- but only once! + if path.parts[:len(relpath.parts)] != relpath.parts: + continue + subparts = path.parts[len(relpath.parts):] + if len(subparts) == 1: + subdirs_returned.append(subparts[0]) + elif len(subparts) > 1: # pragma: nobranch + subdir = subparts[0] + if subdir not in subdirs_seen: + subdirs_seen.add(subdir) + subdirs_returned.append(subdir) + return subdirs_returned diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py new file mode 100644 index 000000000000..00781bd918ed --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py @@ -0,0 +1,312 @@ +import os +import sys +import tempfile + +from . import abc as resources_abc +from contextlib import contextmanager, suppress +from importlib import import_module +from importlib.abc import ResourceLoader +from io import BytesIO, TextIOWrapper +from pathlib import Path +from types import ModuleType +from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401 +from typing import cast +from typing.io import BinaryIO, TextIO +from zipfile import ZipFile + + +Package = Union[ModuleType, str] +if sys.version_info >= (3, 6): + Resource = Union[str, os.PathLike] # pragma: <=35 +else: + Resource = str # pragma: >=36 + + +def _get_package(package) -> ModuleType: + """Take a package name or module object and return the module. + + If a name, the module is imported. If the passed or imported module + object is not a package, raise an exception. 
+ """ + if hasattr(package, '__spec__'): + if package.__spec__.submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format( + package.__spec__.name)) + else: + return package + else: + module = import_module(package) + if module.__spec__.submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + else: + return module + + +def _normalize_path(path) -> str: + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. + """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError('{!r} must be only a file name'.format(path)) + else: + return file_name + + +def _get_resource_reader( + package: ModuleType) -> Optional[resources_abc.ResourceReader]: + # Return the package's loader if it's a ResourceReader. We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. + spec = package.__spec__ + reader = getattr(spec.loader, 'get_resource_reader', None) + if reader is None: + return None + return cast(resources_abc.ResourceReader, reader(spec.name)) + + +def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource.""" + resource = _normalize_path(resource) + package = _get_package(package) + reader = _get_resource_reader(package) + if reader is not None: + return reader.open_resource(resource) + # Using pathlib doesn't work well here due to the lack of 'strict' + # argument for pathlib.Path.resolve() prior to Python 3.6. + absolute_package_path = os.path.abspath(package.__spec__.origin) + package_path = os.path.dirname(absolute_package_path) + full_path = os.path.join(package_path, resource) + try: + return open(full_path, mode='rb') + except OSError: + # Just assume the loader is a resource loader; all the relevant + # importlib.machinery loaders are and an AttributeError for + # get_data() will make it clear what is needed from the loader. + loader = cast(ResourceLoader, package.__spec__.loader) + data = None + if hasattr(package.__spec__.loader, 'get_data'): + with suppress(OSError): + data = loader.get_data(full_path) + if data is None: + package_name = package.__spec__.name + message = '{!r} resource not found in {!r}'.format( + resource, package_name) + raise FileNotFoundError(message) + else: + return BytesIO(data) + + +def open_text(package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict') -> TextIO: + """Return a file-like object opened for text reading of the resource.""" + resource = _normalize_path(resource) + package = _get_package(package) + reader = _get_resource_reader(package) + if reader is not None: + return TextIOWrapper(reader.open_resource(resource), encoding, errors) + # Using pathlib doesn't work well here due to the lack of 'strict' + # argument for pathlib.Path.resolve() prior to Python 3.6. 
+ absolute_package_path = os.path.abspath(package.__spec__.origin) + package_path = os.path.dirname(absolute_package_path) + full_path = os.path.join(package_path, resource) + try: + return open(full_path, mode='r', encoding=encoding, errors=errors) + except OSError: + # Just assume the loader is a resource loader; all the relevant + # importlib.machinery loaders are and an AttributeError for + # get_data() will make it clear what is needed from the loader. + loader = cast(ResourceLoader, package.__spec__.loader) + data = None + if hasattr(package.__spec__.loader, 'get_data'): + with suppress(OSError): + data = loader.get_data(full_path) + if data is None: + package_name = package.__spec__.name + message = '{!r} resource not found in {!r}'.format( + resource, package_name) + raise FileNotFoundError(message) + else: + return TextIOWrapper(BytesIO(data), encoding, errors) + + +def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource.""" + resource = _normalize_path(resource) + package = _get_package(package) + with open_binary(package, resource) as fp: + return fp.read() + + +def read_text(package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict') -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + resource = _normalize_path(resource) + package = _get_package(package) + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +@contextmanager +def path(package: Package, resource: Resource) -> Iterator[Path]: + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + resource = _normalize_path(resource) + package = _get_package(package) + reader = _get_resource_reader(package) + if reader is not None: + try: + yield Path(reader.resource_path(resource)) + return + except FileNotFoundError: + pass + # Fall-through for both the lack of resource_path() *and* if + # resource_path() raises FileNotFoundError. + package_directory = Path(package.__spec__.origin).parent + file_path = package_directory / resource + if file_path.exists(): + yield file_path + else: + with open_binary(package, resource) as fp: + data = fp.read() + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on + # Windows properly. + fd, raw_path = tempfile.mkstemp() + try: + os.write(fd, data) + os.close(fd) + yield Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ + package = _get_package(package) + _normalize_path(name) + reader = _get_resource_reader(package) + if reader is not None: + return reader.is_resource(name) + try: + package_contents = set(contents(package)) + except (NotADirectoryError, FileNotFoundError): + return False + if name not in package_contents: + return False + # Just because the given file_name lives as an entry in the package's + # contents doesn't necessarily mean it's a resource. 
Directories are not + # resources, so let's try to find out if it's a directory or not. + path = Path(package.__spec__.origin).parent / name + if path.is_file(): + return True + if path.is_dir(): + return False + # If it's not a file and it's not a directory, what is it? Well, this + # means the file doesn't exist on the file system, so it probably lives + # inside a zip file. We have to crack open the zip, look at its table of + # contents, and make sure that this entry doesn't have sub-entries. + archive_path = package.__spec__.loader.archive # type: ignore + package_directory = Path(package.__spec__.origin).parent + with ZipFile(archive_path) as zf: + toc = zf.namelist() + relpath = package_directory.relative_to(archive_path) + candidate_path = relpath / name + for entry in toc: # pragma: nobranch + try: + relative_to_candidate = Path(entry).relative_to(candidate_path) + except ValueError: + # The two paths aren't relative to each other so we can ignore it. + continue + # Since directories aren't explicitly listed in the zip file, we must + # infer their 'directory-ness' by looking at the number of path + # components in the path relative to the package resource we're + # looking up. If there are zero additional parts, it's a file, i.e. a + # resource. If there are more than zero it's a directory, i.e. not a + # resource. It has to be one of these two cases. + return len(relative_to_candidate.parts) == 0 + # I think it's impossible to get here. It would mean that we are looking + # for a resource in a zip file, there's an entry matching it in the return + # value of contents(), but we never actually found it in the zip's table of + # contents. + raise AssertionError('Impossible situation') + + +def contents(package: Package) -> Iterable[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + package = _get_package(package) + reader = _get_resource_reader(package) + if reader is not None: + return reader.contents() + # Is the package a namespace package? By definition, namespace packages + # cannot have resources. + if (package.__spec__.origin == 'namespace' and + not package.__spec__.has_location): + return () + package_directory = Path(package.__spec__.origin).parent + try: + return os.listdir(str(package_directory)) + except (NotADirectoryError, FileNotFoundError): + # The package is probably in a zip file. + archive_path = getattr(package.__spec__.loader, 'archive', None) + if archive_path is None: + raise + relpath = package_directory.relative_to(archive_path) + with ZipFile(archive_path) as zf: + toc = zf.namelist() + subdirs_seen = set() # type: Set + subdirs_returned = [] + for filename in toc: + path = Path(filename) + # Strip off any path component parts that are in common with the + # package directory, relative to the zip archive's file system + # path. This gives us all the parts that live under the named + # package inside the zip file. If the length of these subparts is + # exactly 1, then it is situated inside the package. The resulting + # length will be 0 if it's above the package, and it will be + # greater than 1 if it lives in a subdirectory of the package + # directory. + # + # However, since directories themselves don't appear in the zip + # archive as a separate entry, we need to return the first path + # component for any case that has > 1 subparts -- but only once! 
+ if path.parts[:len(relpath.parts)] != relpath.parts: + continue + subparts = path.parts[len(relpath.parts):] + if len(subparts) == 1: + subdirs_returned.append(subparts[0]) + elif len(subparts) > 1: # pragma: nobranch + subdir = subparts[0] + if subdir not in subdirs_seen: + subdirs_seen.add(subdir) + subdirs_returned.append(subdir) + return subdirs_returned diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py new file mode 100644 index 000000000000..f49e8c700818 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py @@ -0,0 +1,58 @@ +from __future__ import absolute_import + +from ._compat import ABC, FileNotFoundError +from abc import abstractmethod + +# We use mypy's comment syntax here since this file must be compatible with +# both Python 2 and 3. +try: + from typing import BinaryIO, Iterable, Text # noqa: F401 +except ImportError: + # Python 2 + pass + + +class ResourceReader(ABC): + """Abstract base class for loaders to provide resource reading support.""" + + @abstractmethod + def open_resource(self, resource): + # type: (Text) -> BinaryIO + """Return an opened, file-like object for binary reading. + + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abstractmethod + def resource_path(self, resource): + # type: (Text) -> Text + """Return the file system path to the specified resource. + + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abstractmethod + def is_resource(self, path): + # type: (Text) -> bool + """Return True if the named 'path' is a resource. + + Files are resources, directories are not. 
+ """ + raise FileNotFoundError + + @abstractmethod + def contents(self): + # type: () -> Iterable[str] + """Return an iterable of entries in `package`.""" + raise FileNotFoundError diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt new file mode 100644 index 000000000000..6d7de6e6abef --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt @@ -0,0 +1 @@ +1.0.2 diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/LICENSE diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA similarity index 95% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA index 9ae0922c7a5d..66db8b78aac8 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: importlib-resources -Version: 3.3.1 +Version: 3.3.0 Summary: Read resources from Python packages Home-page: https://github.com/python/importlib_resources Author: Barry Warsaw @@ -28,7 +28,7 @@ Requires-Dist: jaraco.packaging ; extra == 'docs' ``importlib_resources`` is a backport of Python standard library `importlib.resources `_ -module for Python 2.7, and 3.6 through 3.8. Users of Python 3.9 and beyond +module for Python 2.7, and 3.4 through 3.8. Users of Python 3.9 and beyond should use the standard library module, since for these versions, ``importlib_resources`` just delegates to that module. 
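The `_py2.py`/`_py3.py` modules added above provide the original function-based importlib_resources API described in their docstrings. A minimal usage sketch, assuming a package `mypkg` that ships a `schema.json` file (both names are placeholders):

    import importlib_resources as resources  # importlib.resources in the 3.7+ stdlib

    # Decode a bundled file as text; encoding/errors have bytes.decode() semantics.
    text = resources.read_text("mypkg", "schema.json")

    # Obtain a real filesystem path; if the package lives inside a zip, the data
    # is copied to a temporary file that is removed when the block exits.
    with resources.path("mypkg", "schema.json") as p:
        size = p.stat().st_size

    # contents() lists directories too, but only files count as resources.
    for name in resources.contents("mypkg"):
        print(name, resources.is_resource("mypkg", name))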
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD similarity index 90% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/RECORD rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD index c457b349da36..20e1b9b44a2d 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/RECORD +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD @@ -35,8 +35,8 @@ importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQ importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=AYf51fj80OKCRis93v2DlZjt5rM-VQOPptSHJbFtkXw,1131 importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=e6HXvTEObXvJcNxyX5I8tu5M8_6mSN8ALahHfqE7ADA,698 -importlib_resources-3.3.1.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568 -importlib_resources-3.3.1.dist-info/METADATA,sha256=ulI0eHuldtC-h2_WiQal2AE2eoE91x_xKzbz9kpWvlk,1791 -importlib_resources-3.3.1.dist-info/WHEEL,sha256=oh0NKYrTcu1i1-wgrI1cnhkjYIi8WJ-8qd9Jrr5_y4E,110 -importlib_resources-3.3.1.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 -importlib_resources-3.3.1.dist-info/RECORD,, +importlib_resources-3.3.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568 +importlib_resources-3.3.0.dist-info/METADATA,sha256=GxPMbCwUwlCuHNCiPJvP4IC_mTKqP4b_W7UqqNidcF4,1791 +importlib_resources-3.3.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +importlib_resources-3.3.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 +importlib_resources-3.3.0.dist-info/RECORD,, diff --git a/third_party/python/cbor2/cbor2-4.0.1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL similarity index 70% rename from third_party/python/cbor2/cbor2-4.0.1.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL index 8b6dd1b5a884..6d38aa0601b3 100644 --- a/third_party/python/cbor2/cbor2-4.0.1.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.35.1) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/top_level.txt rename to 
third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/__init__.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_common.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_common.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_compat.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_py2.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_py2.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_py3.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_py3.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/abc.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/abc.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/py.typed 
b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/py.typed similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/py.typed rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/py.typed diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/readers.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/readers.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/trees.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/trees.py rename to third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/METADATA deleted file mode 100644 index 165eef1f8160..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/METADATA +++ /dev/null @@ -1,67 +0,0 @@ -Metadata-Version: 2.1 -Name: importlib-resources -Version: 5.2.2 -Summary: Read resources from Python packages -Home-page: https://github.com/python/importlib_resources -Author: Barry Warsaw -Author-email: barry@python.org -License: UNKNOWN -Project-URL: Documentation, https://importlib-resources.readthedocs.io/ -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Requires-Python: >=3.6 -License-File: LICENSE -Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10" -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' -Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' -Provides-Extra: testing -Requires-Dist: pytest (>=4.6) ; extra == 'testing' -Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' -Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' -Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' - -.. image:: https://img.shields.io/pypi/v/importlib_resources.svg - :target: `PyPI link`_ - -.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg - :target: `PyPI link`_ - -.. 
_PyPI link: https://pypi.org/project/importlib_resources - -.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg - :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 - :alt: tests - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :target: https://github.com/psf/black - :alt: Code style: Black - -.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest - :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest - -.. image:: https://img.shields.io/badge/skeleton-2021-informational - :target: https://blog.jaraco.com/skeleton - -``importlib_resources`` is a backport of Python standard library -`importlib.resources -`_ -module for older Pythons. Users of Python 3.9 and beyond -should use the standard library module, since for these versions, -``importlib_resources`` just delegates to that module. - -The key goal of this module is to replace parts of `pkg_resources -`_ with a -solution in Python's stdlib that relies on well-defined APIs. This makes -reading resources included in packages easier, with more stable and consistent -semantics. - - diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/RECORD deleted file mode 100644 index b527f31d004a..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/RECORD +++ /dev/null @@ -1,45 +0,0 @@ -importlib_resources/__init__.py,sha256=zxWIX2A3cIl4sjgN7oBtODZzVDR6P_-iBJ3YGssI5Fg,525 -importlib_resources/_adapters.py,sha256=UC7gvIzeqV7BBqsbOURHLuLhayJEraLHaW7fS33ZEZc,4503 -importlib_resources/_common.py,sha256=AFddi9ERQuHS9XB1W7ZdvIT3y4Ah3AeGDWUglVjK2gc,3145 -importlib_resources/_compat.py,sha256=3LpkIfeN9x4oXjRea5TxZP5VYhPlzuVRhGe-hEv-S0s,2704 -importlib_resources/_itertools.py,sha256=5TUj_APJHq3pvjn04hnP2oYBebP2No7HmNH_hkOGwLQ,607 -importlib_resources/_legacy.py,sha256=Dyfk-FFvF0XUcO1A6fbu3kPxxc6lM6vW2d9syQcaMb8,2620 -importlib_resources/abc.py,sha256=A6PCumk7_pNepblhft1XkCAOAe7fK59UVED55k-UCC4,3880 -importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566 -importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836 -importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/_compat.py,sha256=QGI_4p0DXybypoYvw0kr3jfQqvls3p8u4wy4Wvf0Z_o,435 -importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260 -importlib_resources/tests/test_contents.py,sha256=lAqZsdIl7h1zjGggp0o5Cw0E0SF_4Gys9MryqqDIuGo,914 -importlib_resources/tests/test_files.py,sha256=1Nqv6VM_MjfwrmtXYL1a1CMT0QhCxi3hNMqwXlfMQTg,1184 -importlib_resources/tests/test_open.py,sha256=qi9trwyzkRsvRJ2B53Gh-357OFKYv0aR6_p-71Fyegk,2359 -importlib_resources/tests/test_path.py,sha256=KGXgFjjrqZV8Gu3blc9fHf8AM3mHfWxeIJv9WTJxZL4,1939 -importlib_resources/tests/test_read.py,sha256=HyCg_nB99Pyk2KzSAdMht4XCunxha4FXJ4MX8nlngn8,2158 -importlib_resources/tests/test_reader.py,sha256=hgXHquqAEnioemv20ZZcDlVaiOrcZKADO37_FkiQ00Y,4286 -importlib_resources/tests/test_resource.py,sha256=4ytERMTbzKEwbLcrgSKIdMk0v39pVFY5cDUcVNNW_So,8371 
-importlib_resources/tests/update-zips.py,sha256=x3iJVqWnMM5qp4Oob2Pl3o6Yi03sUjEv_5Wf-UCg3ps,1415 -importlib_resources/tests/util.py,sha256=iswefvDyo_qMpVP-ofeKO8kPKJNhuVxnvLCRETDm1jc,5731 -importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 -importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 -importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 -importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 -importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 -importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 -importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 -importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 -importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 -importlib_resources-5.2.2.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568 -importlib_resources-5.2.2.dist-info/METADATA,sha256=fY-_tw95Jut84LNhqbkOl1dNhmGHFNo9VVfS_QZTZjQ,2876 -importlib_resources-5.2.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 -importlib_resources-5.2.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 -importlib_resources-5.2.2.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/WHEEL deleted file mode 100644 index 385faab0525c..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/__init__.py deleted file mode 100644 index 2468f5754d15..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/__init__.py +++ /dev/null @@ 
-1,36 +0,0 @@ -"""Read resources contained within a package.""" - -from ._common import ( - as_file, - files, - Package, - Resource, -) - -from ._legacy import ( - contents, - open_binary, - read_binary, - open_text, - read_text, - is_resource, - path, -) - -from importlib_resources.abc import ResourceReader - - -__all__ = [ - 'Package', - 'Resource', - 'ResourceReader', - 'as_file', - 'contents', - 'files', - 'is_resource', - 'open_binary', - 'open_text', - 'path', - 'read_binary', - 'read_text', -] diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_adapters.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_adapters.py deleted file mode 100644 index 9907b148b396..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_adapters.py +++ /dev/null @@ -1,170 +0,0 @@ -from contextlib import suppress -from io import TextIOWrapper - -from . import abc - - -class SpecLoaderAdapter: - """ - Adapt a package spec to adapt the underlying loader. - """ - - def __init__(self, spec, adapter=lambda spec: spec.loader): - self.spec = spec - self.loader = adapter(spec) - - def __getattr__(self, name): - return getattr(self.spec, name) - - -class TraversableResourcesLoader: - """ - Adapt a loader to provide TraversableResources. - """ - - def __init__(self, spec): - self.spec = spec - - def get_resource_reader(self, name): - return CompatibilityFiles(self.spec)._native() - - -def _io_wrapper(file, mode='r', *args, **kwargs): - if mode == 'r': - return TextIOWrapper(file, *args, **kwargs) - elif mode == 'rb': - return file - raise ValueError( - "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) - ) - - -class CompatibilityFiles: - """ - Adapter for an existing or non-existant resource reader - to provide a compability .files(). - """ - - class SpecPath(abc.Traversable): - """ - Path tied to a module spec. - Can be read and exposes the resource reader children. - """ - - def __init__(self, spec, reader): - self._spec = spec - self._reader = reader - - def iterdir(self): - if not self._reader: - return iter(()) - return iter( - CompatibilityFiles.ChildPath(self._reader, path) - for path in self._reader.contents() - ) - - def is_file(self): - return False - - is_dir = is_file - - def joinpath(self, other): - if not self._reader: - return CompatibilityFiles.OrphanPath(other) - return CompatibilityFiles.ChildPath(self._reader, other) - - @property - def name(self): - return self._spec.name - - def open(self, mode='r', *args, **kwargs): - return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs) - - class ChildPath(abc.Traversable): - """ - Path tied to a resource reader child. - Can be read but doesn't expose any meaningfull children. - """ - - def __init__(self, reader, name): - self._reader = reader - self._name = name - - def iterdir(self): - return iter(()) - - def is_file(self): - return self._reader.is_resource(self.name) - - def is_dir(self): - return not self.is_file() - - def joinpath(self, other): - return CompatibilityFiles.OrphanPath(self.name, other) - - @property - def name(self): - return self._name - - def open(self, mode='r', *args, **kwargs): - return _io_wrapper( - self._reader.open_resource(self.name), mode, *args, **kwargs - ) - - class OrphanPath(abc.Traversable): - """ - Orphan path, not tied to a module spec or resource reader. 
- Can't be read and doesn't expose any meaningful children. - """ - - def __init__(self, *path_parts): - if len(path_parts) < 1: - raise ValueError('Need at least one path part to construct a path') - self._path = path_parts - - def iterdir(self): - return iter(()) - - def is_file(self): - return False - - is_dir = is_file - - def joinpath(self, other): - return CompatibilityFiles.OrphanPath(*self._path, other) - - @property - def name(self): - return self._path[-1] - - def open(self, mode='r', *args, **kwargs): - raise FileNotFoundError("Can't open orphan path") - - def __init__(self, spec): - self.spec = spec - - @property - def _reader(self): - with suppress(AttributeError): - return self.spec.loader.get_resource_reader(self.spec.name) - - def _native(self): - """ - Return the native reader if it supports files(). - """ - reader = self._reader - return reader if hasattr(reader, 'files') else self - - def __getattr__(self, attr): - return getattr(self._reader, attr) - - def files(self): - return CompatibilityFiles.SpecPath(self.spec, self._reader) - - -def wrap_spec(package): - """ - Construct a package spec with traversable compatibility - on the spec/loader/reader. - """ - return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_common.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_common.py deleted file mode 100644 index 25511672ebb1..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_common.py +++ /dev/null @@ -1,118 +0,0 @@ -import os -import pathlib -import tempfile -import functools -import contextlib -import types -import importlib - -from typing import Union, Any, Optional -from .abc import ResourceReader, Traversable - -from ._compat import wrap_spec - -Package = Union[types.ModuleType, str] -Resource = Union[str, os.PathLike] - - -def files(package): - # type: (Package) -> Traversable - """ - Get a Traversable resource from a package - """ - return from_package(get_package(package)) - - -def normalize_path(path): - # type: (Any) -> str - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - str_path = str(path) - parent, file_name = os.path.split(str_path) - if parent: - raise ValueError(f'{path!r} must be only a file name') - return file_name - - -def get_resource_reader(package): - # type: (types.ModuleType) -> Optional[ResourceReader] - """ - Return the package's loader if it's a ResourceReader. - """ - # We can't use - # a issubclass() check here because apparently abc.'s __subclasscheck__() - # hook wants to create a weak reference to the object, but - # zipimport.zipimporter does not support weak references, resulting in a - # TypeError. That seems terrible. - spec = package.__spec__ - reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore - if reader is None: - return None - return reader(spec.name) # type: ignore - - -def resolve(cand): - # type: (Package) -> types.ModuleType - return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) - - -def get_package(package): - # type: (Package) -> types.ModuleType - """Take a package name or module object and return the module. - - Raise an exception if the resolved module is not a package. 
- """ - resolved = resolve(package) - if wrap_spec(resolved).submodule_search_locations is None: - raise TypeError(f'{package!r} is not a package') - return resolved - - -def from_package(package): - """ - Return a Traversable object for the given package. - - """ - spec = wrap_spec(package) - reader = spec.loader.get_resource_reader(spec.name) - return reader.files() - - -@contextlib.contextmanager -def _tempfile(reader, suffix=''): - # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' - # blocks due to the need to close the temporary file to work on Windows - # properly. - fd, raw_path = tempfile.mkstemp(suffix=suffix) - try: - try: - os.write(fd, reader()) - finally: - os.close(fd) - del reader - yield pathlib.Path(raw_path) - finally: - try: - os.remove(raw_path) - except FileNotFoundError: - pass - - -@functools.singledispatch -def as_file(path): - """ - Given a Traversable object, return that object as a - path on the local file system in a context manager. - """ - return _tempfile(path.read_bytes, suffix=path.name) - - -@as_file.register(pathlib.Path) -@contextlib.contextmanager -def _(path): - """ - Degenerate behavior for pathlib.Path objects. - """ - yield path diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_compat.py deleted file mode 100644 index 61e48d47d3a4..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_compat.py +++ /dev/null @@ -1,98 +0,0 @@ -# flake8: noqa - -import abc -import sys -import pathlib -from contextlib import suppress - -if sys.version_info >= (3, 10): - from zipfile import Path as ZipPath # type: ignore -else: - from zipp import Path as ZipPath # type: ignore - - -try: - from typing import runtime_checkable # type: ignore -except ImportError: - - def runtime_checkable(cls): # type: ignore - return cls - - -try: - from typing import Protocol # type: ignore -except ImportError: - Protocol = abc.ABC # type: ignore - - -class TraversableResourcesLoader: - """ - Adapt loaders to provide TraversableResources and other - compatibility. - - Used primarily for Python 3.9 and earlier where the native - loaders do not yet implement TraversableResources. - """ - - def __init__(self, spec): - self.spec = spec - - @property - def path(self): - return self.spec.origin - - def get_resource_reader(self, name): - from . 
import readers, _adapters - - def _zip_reader(spec): - with suppress(AttributeError): - return readers.ZipReader(spec.loader, spec.name) - - def _namespace_reader(spec): - with suppress(AttributeError, ValueError): - return readers.NamespaceReader(spec.submodule_search_locations) - - def _available_reader(spec): - with suppress(AttributeError): - return spec.loader.get_resource_reader(spec.name) - - def _native_reader(spec): - reader = _available_reader(spec) - return reader if hasattr(reader, 'files') else None - - def _file_reader(spec): - try: - path = pathlib.Path(self.path) - except TypeError: - return None - if path.exists(): - return readers.FileReader(self) - - return ( - # native reader if it supplies 'files' - _native_reader(self.spec) - or - # local ZipReader if a zip module - _zip_reader(self.spec) - or - # local NamespaceReader if a namespace module - _namespace_reader(self.spec) - or - # local FileReader - _file_reader(self.spec) - # fallback - adapt the spec ResourceReader to TraversableReader - or _adapters.CompatibilityFiles(self.spec) - ) - - -def wrap_spec(package): - """ - Construct a package spec with traversable compatibility - on the spec/loader/reader. - - Supersedes _adapters.wrap_spec to use TraversableResourcesLoader - from above for older Python compatibility (<3.10). - """ - from . import _adapters - - return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_itertools.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_itertools.py deleted file mode 100644 index dd45f2f09663..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_itertools.py +++ /dev/null @@ -1,19 +0,0 @@ -from itertools import filterfalse - - -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_legacy.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_legacy.py deleted file mode 100644 index 2ddec5f90a32..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_legacy.py +++ /dev/null @@ -1,84 +0,0 @@ -import os -import pathlib -import types - -from typing import Union, Iterable, ContextManager, BinaryIO, TextIO - -from . 
import _common - -Package = Union[types.ModuleType, str] -Resource = Union[str, os.PathLike] - - -def open_binary(package: Package, resource: Resource) -> BinaryIO: - """Return a file-like object opened for binary reading of the resource.""" - return (_common.files(package) / _common.normalize_path(resource)).open('rb') - - -def read_binary(package: Package, resource: Resource) -> bytes: - """Return the binary contents of the resource.""" - return (_common.files(package) / _common.normalize_path(resource)).read_bytes() - - -def open_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict', -) -> TextIO: - """Return a file-like object opened for text reading of the resource.""" - return (_common.files(package) / _common.normalize_path(resource)).open( - 'r', encoding=encoding, errors=errors - ) - - -def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict', -) -> str: - """Return the decoded string of the resource. - - The decoding-related arguments have the same semantics as those of - bytes.decode(). - """ - with open_text(package, resource, encoding, errors) as fp: - return fp.read() - - -def contents(package: Package) -> Iterable[str]: - """Return an iterable of entries in `package`. - - Note that not all entries are resources. Specifically, directories are - not considered resources. Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ - return [path.name for path in _common.files(package).iterdir()] - - -def is_resource(package: Package, name: str) -> bool: - """True if `name` is a resource inside `package`. - - Directories are *not* resources. - """ - resource = _common.normalize_path(name) - return any( - traversable.name == resource and traversable.is_file() - for traversable in _common.files(package).iterdir() - ) - - -def path( - package: Package, - resource: Resource, -) -> ContextManager[pathlib.Path]: - """A context manager providing a file path object to the resource. - - If the resource does not already exist on its own on the file system, - a temporary file will be created. If the file was created, the file - will be deleted upon exiting the context manager (no exception is - raised if the file was deleted prior to the context manager - exiting). - """ - return _common.as_file(_common.files(package) / _common.normalize_path(resource)) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/abc.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/abc.py deleted file mode 100644 index 56dc81274f6a..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/abc.py +++ /dev/null @@ -1,137 +0,0 @@ -import abc -from typing import BinaryIO, Iterable, Text - -from ._compat import runtime_checkable, Protocol - - -class ResourceReader(metaclass=abc.ABCMeta): - """Abstract base class for loaders to provide resource reading support.""" - - @abc.abstractmethod - def open_resource(self, resource: Text) -> BinaryIO: - """Return an opened, file-like object for binary reading. - - The 'resource' argument is expected to represent only a file name. - If the resource cannot be found, FileNotFoundError is raised. - """ - # This deliberately raises FileNotFoundError instead of - # NotImplementedError so that if this method is accidentally called, - # it'll still do the right thing. 
- raise FileNotFoundError - - @abc.abstractmethod - def resource_path(self, resource: Text) -> Text: - """Return the file system path to the specified resource. - - The 'resource' argument is expected to represent only a file name. - If the resource does not exist on the file system, raise - FileNotFoundError. - """ - # This deliberately raises FileNotFoundError instead of - # NotImplementedError so that if this method is accidentally called, - # it'll still do the right thing. - raise FileNotFoundError - - @abc.abstractmethod - def is_resource(self, path: Text) -> bool: - """Return True if the named 'path' is a resource. - - Files are resources, directories are not. - """ - raise FileNotFoundError - - @abc.abstractmethod - def contents(self) -> Iterable[str]: - """Return an iterable of entries in `package`.""" - raise FileNotFoundError - - -@runtime_checkable -class Traversable(Protocol): - """ - An object with a subset of pathlib.Path methods suitable for - traversing directories and opening files. - """ - - @abc.abstractmethod - def iterdir(self): - """ - Yield Traversable objects in self - """ - - def read_bytes(self): - """ - Read contents of self as bytes - """ - with self.open('rb') as strm: - return strm.read() - - def read_text(self, encoding=None): - """ - Read contents of self as text - """ - with self.open(encoding=encoding) as strm: - return strm.read() - - @abc.abstractmethod - def is_dir(self) -> bool: - """ - Return True if self is a dir - """ - - @abc.abstractmethod - def is_file(self) -> bool: - """ - Return True if self is a file - """ - - @abc.abstractmethod - def joinpath(self, child): - """ - Return Traversable child in self - """ - - def __truediv__(self, child): - """ - Return Traversable child in self - """ - return self.joinpath(child) - - @abc.abstractmethod - def open(self, mode='r', *args, **kwargs): - """ - mode may be 'r' or 'rb' to open as text or binary. Return a handle - suitable for reading (same as pathlib.Path.open). - - When opening as text, accepts encoding parameters such as those - accepted by io.TextIOWrapper. - """ - - @abc.abstractproperty - def name(self) -> str: - """ - The base name of this object without any parent references. - """ - - -class TraversableResources(ResourceReader): - """ - The required interface for providing traversable - resources. - """ - - @abc.abstractmethod - def files(self): - """Return a Traversable object for the loaded package.""" - - def open_resource(self, resource): - return self.files().joinpath(resource).open('rb') - - def resource_path(self, resource): - raise FileNotFoundError(resource) - - def is_resource(self, path): - return self.files().joinpath(path).is_file() - - def contents(self): - return (item.name for item in self.files().iterdir()) diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/readers.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/readers.py deleted file mode 100644 index f1190ca452a1..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/readers.py +++ /dev/null @@ -1,122 +0,0 @@ -import collections -import pathlib -import operator - -from . 
import abc - -from ._itertools import unique_everseen -from ._compat import ZipPath - - -def remove_duplicates(items): - return iter(collections.OrderedDict.fromkeys(items)) - - -class FileReader(abc.TraversableResources): - def __init__(self, loader): - self.path = pathlib.Path(loader.path).parent - - def resource_path(self, resource): - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - return str(self.path.joinpath(resource)) - - def files(self): - return self.path - - -class ZipReader(abc.TraversableResources): - def __init__(self, loader, module): - _, _, name = module.rpartition('.') - self.prefix = loader.prefix.replace('\\', '/') + name + '/' - self.archive = loader.archive - - def open_resource(self, resource): - try: - return super().open_resource(resource) - except KeyError as exc: - raise FileNotFoundError(exc.args[0]) - - def is_resource(self, path): - # workaround for `zipfile.Path.is_file` returning true - # for non-existent paths. - target = self.files().joinpath(path) - return target.is_file() and target.exists() - - def files(self): - return ZipPath(self.archive, self.prefix) - - -class MultiplexedPath(abc.Traversable): - """ - Given a series of Traversable objects, implement a merged - version of the interface across all objects. Useful for - namespace packages which may be multihomed at a single - name. - """ - - def __init__(self, *paths): - self._paths = list(map(pathlib.Path, remove_duplicates(paths))) - if not self._paths: - message = 'MultiplexedPath must contain at least one path' - raise FileNotFoundError(message) - if not all(path.is_dir() for path in self._paths): - raise NotADirectoryError('MultiplexedPath only supports directories') - - def iterdir(self): - files = (file for path in self._paths for file in path.iterdir()) - return unique_everseen(files, key=operator.attrgetter('name')) - - def read_bytes(self): - raise FileNotFoundError(f'{self} is not a file') - - def read_text(self, *args, **kwargs): - raise FileNotFoundError(f'{self} is not a file') - - def is_dir(self): - return True - - def is_file(self): - return False - - def joinpath(self, child): - # first try to find child in current paths - for file in self.iterdir(): - if file.name == child: - return file - # if it does not exist, construct it with the first path - return self._paths[0] / child - - __truediv__ = joinpath - - def open(self, *args, **kwargs): - raise FileNotFoundError(f'{self} is not a file') - - @property - def name(self): - return self._paths[0].name - - def __repr__(self): - paths = ', '.join(f"'{path}'" for path in self._paths) - return f'MultiplexedPath({paths})' - - -class NamespaceReader(abc.TraversableResources): - def __init__(self, namespace_path): - if 'NamespacePath' not in str(namespace_path): - raise ValueError('Invalid path') - self.path = MultiplexedPath(*list(namespace_path)) - - def resource_path(self, resource): - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. 
- """ - return str(self.path.joinpath(resource)) - - def files(self): - return self.path diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/simple.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/simple.py deleted file mode 100644 index da073cbdb11e..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/simple.py +++ /dev/null @@ -1,116 +0,0 @@ -""" -Interface adapters for low-level readers. -""" - -import abc -import io -import itertools -from typing import BinaryIO, List - -from .abc import Traversable, TraversableResources - - -class SimpleReader(abc.ABC): - """ - The minimum, low-level interface required from a resource - provider. - """ - - @abc.abstractproperty - def package(self): - # type: () -> str - """ - The name of the package for which this reader loads resources. - """ - - @abc.abstractmethod - def children(self): - # type: () -> List['SimpleReader'] - """ - Obtain an iterable of SimpleReader for available - child containers (e.g. directories). - """ - - @abc.abstractmethod - def resources(self): - # type: () -> List[str] - """ - Obtain available named resources for this virtual package. - """ - - @abc.abstractmethod - def open_binary(self, resource): - # type: (str) -> BinaryIO - """ - Obtain a File-like for a named resource. - """ - - @property - def name(self): - return self.package.split('.')[-1] - - -class ResourceHandle(Traversable): - """ - Handle to a named resource in a ResourceReader. - """ - - def __init__(self, parent, name): - # type: (ResourceContainer, str) -> None - self.parent = parent - self.name = name # type: ignore - - def is_file(self): - return True - - def is_dir(self): - return False - - def open(self, mode='r', *args, **kwargs): - stream = self.parent.reader.open_binary(self.name) - if 'b' not in mode: - stream = io.TextIOWrapper(*args, **kwargs) - return stream - - def joinpath(self, name): - raise RuntimeError("Cannot traverse into a resource") - - -class ResourceContainer(Traversable): - """ - Traversable container for a package's resources via its reader. - """ - - def __init__(self, reader): - # type: (SimpleReader) -> None - self.reader = reader - - def is_dir(self): - return True - - def is_file(self): - return False - - def iterdir(self): - files = (ResourceHandle(self, name) for name in self.reader.resources) - dirs = map(ResourceContainer, self.reader.children()) - return itertools.chain(files, dirs) - - def open(self, *args, **kwargs): - raise IsADirectoryError() - - def joinpath(self, name): - return next( - traversable for traversable in self.iterdir() if traversable.name == name - ) - - -class TraversableReader(TraversableResources, SimpleReader): - """ - A TraversableResources based on SimpleReader. Resource providers - may derive from this class to provide the TraversableResources - interface by supplying the SimpleReader interface. 
- """ - - def files(self): - return ResourceContainer(self) diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst new file mode 100644 index 000000000000..74e3bab198ae --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst @@ -0,0 +1,61 @@ +The `old pathlib `_ +module on bitbucket is in bugfix-only mode. +The goal of pathlib2 is to provide a backport of +`standard pathlib `_ +module which tracks the standard library module, +so all the newest features of the standard pathlib can be +used also on older Python versions. + +Download +-------- + +Standalone releases are available on PyPI: +http://pypi.python.org/pypi/pathlib2/ + +Development +----------- + +The main development takes place in the Python standard library: see +the `Python developer's guide `_. +In particular, new features should be submitted to the +`Python bug tracker `_. + +Issues that occur in this backport, but that do not occur not in the +standard Python pathlib module can be submitted on +the `pathlib2 bug tracker `_. + +Documentation +------------- + +Refer to the +`standard pathlib `_ +documentation. + +Known Issues +------------ + +For historic reasons, pathlib2 still uses bytes to represent file paths internally. +Unfortunately, on Windows with Python 2.7, the file system encoder (``mcbs``) +has only poor support for non-ascii characters, +and can silently replace non-ascii characters without warning. +For example, ``u'тест'.encode(sys.getfilesystemencoding())`` results in ``????`` +which is obviously completely useless. + +Therefore, on Windows with Python 2.7, until this problem is fixed upstream, +unfortunately you cannot rely on pathlib2 to support the full unicode range for filenames. +See `issue #56 `_ for more details. + +.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop + :target: https://travis-ci.org/mcmtroffaes/pathlib2 + :alt: travis-ci + +.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true + :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2 + :alt: appveyor + +.. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg + :target: https://codecov.io/gh/mcmtroffaes/pathlib2 + :alt: codecov + + + diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA similarity index 85% rename from third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA index cbe76c60ebff..df7284e07856 100644 --- a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA @@ -1,6 +1,6 @@ -Metadata-Version: 2.1 +Metadata-Version: 2.0 Name: pathlib2 -Version: 2.3.6 +Version: 2.3.5 Summary: Object-oriented filesystem paths Home-page: https://github.com/mcmtroffaes/pathlib2 Author: Matthias C. M. 
Troffaes @@ -14,18 +14,16 @@ Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: System :: Filesystems Requires-Dist: six -Requires-Dist: scandir ; python_version<"3.5" - -**As of January 1 2020, this repository will no longer receive any further updates, as Python 2 is no longer supported.** +Requires-Dist: scandir; python_version<"3.5" The `old pathlib `_ module on bitbucket is in bugfix-only mode. @@ -74,9 +72,9 @@ Therefore, on Windows with Python 2.7, until this problem is fixed upstream, unfortunately you cannot rely on pathlib2 to support the full unicode range for filenames. See `issue #56 `_ for more details. -.. |github| image:: https://github.com/mcmtroffaes/pathlib2/actions/workflows/python-package.yml/badge.svg - :target: https://github.com/mcmtroffaes/pathlib2/actions/workflows/python-package.yml - :alt: github +.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop + :target: https://travis-ci.org/mcmtroffaes/pathlib2 + :alt: travis-ci .. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2 diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD new file mode 100644 index 000000000000..6f922ae1f778 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD @@ -0,0 +1,7 @@ +pathlib2-2.3.5.dist-info/metadata.json,sha256=yGoZ-uMKSmkZuZplsz2mNc8SWNIVEbnBaSyya01u5PI,1177 +pathlib2-2.3.5.dist-info/top_level.txt,sha256=tNPkisFiGBFsPUnCIHg62vSFlkx_1NO86Id8lbJmfFQ,9 +pathlib2-2.3.5.dist-info/METADATA,sha256=PEsNR-yYpbPUheyBje2_-GdAJfwXPDtWMSeSsR9VMY0,3300 +pathlib2-2.3.5.dist-info/RECORD,, +pathlib2-2.3.5.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +pathlib2-2.3.5.dist-info/DESCRIPTION.rst,sha256=E6WnieIR9MTnqUQ1746RCpdq3fqlkvqX0Z51-Wpxga8,2250 +pathlib2/__init__.py,sha256=NBfu5wacps1y1YtlXVSPJ8FbE4WtIXucrp5uOYNOO-U,59133 diff --git a/third_party/python/cram/cram-0.7.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL similarity index 70% rename from third_party/python/cram/cram-0.7.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL index 8b6dd1b5a884..9dff69d86102 100644 --- a/third_party/python/cram/cram-0.7.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.24.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git 
a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json new file mode 100644 index 000000000000..575c5271d2ca --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "MIT", "name": "pathlib2", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "Object-oriented filesystem paths", "run_requires": [{"environment": "python_version<\"3.5\"", "requires": ["scandir"]}, {"requires": ["six"]}], "version": "2.3.5", "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/mcmtroffaes/pathlib2"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "matthias.troffaes@gmail.com", "name": "Matthias C. M. Troffaes"}]}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Software Development :: Libraries", "Topic :: System :: Filesystems"], "extras": []} \ No newline at end of file diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt new file mode 100644 index 000000000000..83f3ebe0dd36 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt @@ -0,0 +1 @@ +pathlib2 diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2/__init__.py b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py similarity index 94% rename from third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2/__init__.py rename to third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py index c4a54caf1d79..d5a47a66c667 100644 --- a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2/__init__.py +++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py @@ -1,1820 +1,1809 @@ -# Copyright (c) 2014-2017 Matthias C. M. Troffaes -# Copyright (c) 2012-2014 Antoine Pitrou and contributors -# Distributed under the terms of the MIT License. 
- -import ctypes -import fnmatch -import functools -import io -import ntpath -import os -import posixpath -import re -import six -import sys - -from errno import EINVAL, ENOENT, ENOTDIR, EBADF -from errno import EEXIST, EPERM, EACCES -from operator import attrgetter -from stat import ( - S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO) - -try: - from collections.abc import Sequence # type: ignore -except ImportError: - from collections import Sequence - -try: - from urllib import quote as urlquote_from_bytes # type: ignore -except ImportError: - from urllib.parse \ - import quote_from_bytes as urlquote_from_bytes # type: ignore - - -try: - intern = intern # type: ignore -except NameError: - intern = sys.intern # type: ignore - -supports_symlinks = True -if os.name == 'nt': - import nt # type: ignore - if sys.getwindowsversion().major >= 6 \ - and sys.version_info >= (3, 2): # type: ignore - from nt import _getfinalpathname - else: - supports_symlinks = False - _getfinalpathname = None -else: - nt = None - -try: - from os import scandir as os_scandir # type: ignore -except ImportError: - from scandir import scandir as os_scandir # type: ignore - -__all__ = [ - "PurePath", "PurePosixPath", "PureWindowsPath", - "Path", "PosixPath", "WindowsPath", - ] - -# -# Internals -# - -# EBADF - guard agains macOS `stat` throwing EBADF -_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF) - -_IGNORED_WINERRORS = ( - 21, # ERROR_NOT_READY - drive exists but is not accessible -) - - -def _ignore_error(exception): - return (getattr(exception, 'errno', None) in _IGNORED_ERROS or - getattr(exception, 'winerror', None) in _IGNORED_WINERRORS) - - -def _py2_fsencode(parts): - # py2 => minimal unicode support - assert six.PY2 - return [part.encode(sys.getfilesystemencoding() or 'ascii') - if isinstance(part, six.text_type) else part for part in parts] - - -def _try_except_fileexistserror(try_func, except_func, else_func=None): - if sys.version_info >= (3, 3): - try: - try_func() - except FileExistsError as exc: # noqa: F821 - except_func(exc) - else: - if else_func is not None: - else_func() - else: - try: - try_func() - except EnvironmentError as exc: - if exc.errno != EEXIST: - raise - else: - except_func(exc) - else: - if else_func is not None: - else_func() - - -def _try_except_filenotfounderror(try_func, except_func): - if sys.version_info >= (3, 3): - try: - try_func() - except FileNotFoundError as exc: # noqa: F821 - except_func(exc) - elif os.name != 'nt': - try: - try_func() - except EnvironmentError as exc: - if exc.errno != ENOENT: - raise - else: - except_func(exc) - else: - try: - try_func() - except WindowsError as exc: - # errno contains winerror - # 2 = file not found - # 3 = path not found - if exc.errno not in (2, 3): - raise - else: - except_func(exc) - except EnvironmentError as exc: - if exc.errno != ENOENT: - raise - else: - except_func(exc) - - -def _try_except_permissionerror_iter(try_iter, except_iter): - if sys.version_info >= (3, 3): - try: - for x in try_iter(): - yield x - except PermissionError as exc: # noqa: F821 - for x in except_iter(exc): - yield x - else: - try: - for x in try_iter(): - yield x - except EnvironmentError as exc: - if exc.errno not in (EPERM, EACCES): - raise - else: - for x in except_iter(exc): - yield x - - -def _win32_get_unique_path_id(path): - # get file information, needed for samefile on older Python versions - # see http://timgolden.me.uk/python/win32_how_do_i/ - # see_if_two_files_are_the_same_file.html - from ctypes import POINTER, Structure, 
WinError - from ctypes.wintypes import DWORD, HANDLE, BOOL - - class FILETIME(Structure): - _fields_ = [("datetime_lo", DWORD), - ("datetime_hi", DWORD), - ] - - class BY_HANDLE_FILE_INFORMATION(Structure): - _fields_ = [("attributes", DWORD), - ("created_at", FILETIME), - ("accessed_at", FILETIME), - ("written_at", FILETIME), - ("volume", DWORD), - ("file_hi", DWORD), - ("file_lo", DWORD), - ("n_links", DWORD), - ("index_hi", DWORD), - ("index_lo", DWORD), - ] - - CreateFile = ctypes.windll.kernel32.CreateFileW - CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, - DWORD, DWORD, HANDLE] - CreateFile.restype = HANDLE - GetFileInformationByHandle = ( - ctypes.windll.kernel32.GetFileInformationByHandle) - GetFileInformationByHandle.argtypes = [ - HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)] - GetFileInformationByHandle.restype = BOOL - CloseHandle = ctypes.windll.kernel32.CloseHandle - CloseHandle.argtypes = [HANDLE] - CloseHandle.restype = BOOL - GENERIC_READ = 0x80000000 - FILE_SHARE_READ = 0x00000001 - FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 - OPEN_EXISTING = 3 - if os.path.isdir(path): - flags = FILE_FLAG_BACKUP_SEMANTICS - else: - flags = 0 - hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ, - None, OPEN_EXISTING, flags, None) - if hfile == 0xffffffff: - if sys.version_info >= (3, 3): - raise FileNotFoundError(path) # noqa: F821 - else: - exc = OSError("file not found: path") - exc.errno = ENOENT - raise exc - info = BY_HANDLE_FILE_INFORMATION() - success = GetFileInformationByHandle(hfile, info) - CloseHandle(hfile) - if success == 0: - raise WinError() - return info.volume, info.index_hi, info.index_lo - - -def _is_wildcard_pattern(pat): - # Whether this pattern needs actual matching using fnmatch, or can - # be looked up directly as a file. - return "*" in pat or "?" in pat or "[" in pat - - -class _Flavour(object): - - """A flavour implements a particular (platform-specific) set of path - semantics.""" - - def __init__(self): - self.join = self.sep.join - - def parse_parts(self, parts): - if six.PY2: - parts = _py2_fsencode(parts) - parsed = [] - sep = self.sep - altsep = self.altsep - drv = root = '' - it = reversed(parts) - for part in it: - if not part: - continue - if altsep: - part = part.replace(altsep, sep) - drv, root, rel = self.splitroot(part) - if sep in rel: - for x in reversed(rel.split(sep)): - if x and x != '.': - parsed.append(intern(x)) - else: - if rel and rel != '.': - parsed.append(intern(rel)) - if drv or root: - if not drv: - # If no drive is present, try to find one in the previous - # parts. This makes the result of parsing e.g. - # ("C:", "/", "a") reasonably intuitive. - for part in it: - if not part: - continue - if altsep: - part = part.replace(altsep, sep) - drv = self.splitroot(part)[0] - if drv: - break - break - if drv or root: - parsed.append(drv + root) - parsed.reverse() - return drv, root, parsed - - def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2): - """ - Join the two paths represented by the respective - (drive, root, parts) tuples. Return a new (drive, root, parts) tuple. 
- """ - if root2: - if not drv2 and drv: - return drv, root2, [drv + root2] + parts2[1:] - elif drv2: - if drv2 == drv or self.casefold(drv2) == self.casefold(drv): - # Same drive => second path is relative to the first - return drv, root, parts + parts2[1:] - else: - # Second path is non-anchored (common case) - return drv, root, parts + parts2 - return drv2, root2, parts2 - - -class _WindowsFlavour(_Flavour): - # Reference for Windows paths can be found at - # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx - - sep = '\\' - altsep = '/' - has_drv = True - pathmod = ntpath - - is_supported = (os.name == 'nt') - - drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') - ext_namespace_prefix = '\\\\?\\' - - reserved_names = ( - set(['CON', 'PRN', 'AUX', 'NUL']) | - set(['COM%d' % i for i in range(1, 10)]) | - set(['LPT%d' % i for i in range(1, 10)]) - ) - - # Interesting findings about extended paths: - # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported - # but '\\?\c:/a' is not - # - extended paths are always absolute; "relative" extended paths will - # fail. - - def splitroot(self, part, sep=sep): - first = part[0:1] - second = part[1:2] - if (second == sep and first == sep): - # XXX extended paths should also disable the collapsing of "." - # components (according to MSDN docs). - prefix, part = self._split_extended_path(part) - first = part[0:1] - second = part[1:2] - else: - prefix = '' - third = part[2:3] - if (second == sep and first == sep and third != sep): - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvvv root - # \\machine\mountpoint\directory\etc\... - # directory ^^^^^^^^^^^^^^ - index = part.find(sep, 2) - if index != -1: - index2 = part.find(sep, index + 1) - # a UNC path can't have two slashes in a row - # (after the initial two) - if index2 != index + 1: - if index2 == -1: - index2 = len(part) - if prefix: - return prefix + part[1:index2], sep, part[index2 + 1:] - else: - return part[:index2], sep, part[index2 + 1:] - drv = root = '' - if second == ':' and first in self.drive_letters: - drv = part[:2] - part = part[2:] - first = third - if first == sep: - root = first - part = part.lstrip(sep) - return prefix + drv, root, part - - def casefold(self, s): - return s.lower() - - def casefold_parts(self, parts): - return [p.lower() for p in parts] - - def resolve(self, path, strict=False): - s = str(path) - if not s: - return os.getcwd() - previous_s = None - if _getfinalpathname is not None: - if strict: - return self._ext_to_normal(_getfinalpathname(s)) - else: - # End of the path after the first one not found - tail_parts = [] - - def _try_func(): - result[0] = self._ext_to_normal(_getfinalpathname(s)) - # if there was no exception, set flag to 0 - result[1] = 0 - - def _exc_func(exc): - pass - - while True: - result = [None, 1] - _try_except_filenotfounderror(_try_func, _exc_func) - if result[1] == 1: # file not found exception raised - previous_s = s - s, tail = os.path.split(s) - tail_parts.append(tail) - if previous_s == s: - return path - else: - s = result[0] - return os.path.join(s, *reversed(tail_parts)) - # Means fallback on absolute - return None - - def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix): - prefix = '' - if s.startswith(ext_prefix): - prefix = s[:4] - s = s[4:] - if s.startswith('UNC\\'): - prefix += s[:3] - s = '\\' + s[3:] - return prefix, s - - def _ext_to_normal(self, s): - # Turn back an extended path into a normal DOS-like path - return self._split_extended_path(s)[1] - - def 
is_reserved(self, parts): - # NOTE: the rules for reserved names seem somewhat complicated - # (e.g. r"..\NUL" is reserved but not r"foo\NUL"). - # We err on the side of caution and return True for paths which are - # not considered reserved by Windows. - if not parts: - return False - if parts[0].startswith('\\\\'): - # UNC paths are never reserved - return False - return parts[-1].partition('.')[0].upper() in self.reserved_names - - def make_uri(self, path): - # Under Windows, file URIs use the UTF-8 encoding. - drive = path.drive - if len(drive) == 2 and drive[1] == ':': - # It's a path on a local drive => 'file:///c:/a/b' - rest = path.as_posix()[2:].lstrip('/') - return 'file:///%s/%s' % ( - drive, urlquote_from_bytes(rest.encode('utf-8'))) - else: - # It's a path on a network drive => 'file://host/share/a/b' - return 'file:' + urlquote_from_bytes( - path.as_posix().encode('utf-8')) - - def gethomedir(self, username): - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif 'HOMEPATH' in os.environ: - try: - drv = os.environ['HOMEDRIVE'] - except KeyError: - drv = '' - userhome = drv + os.environ['HOMEPATH'] - else: - raise RuntimeError("Can't determine home directory") - - if username: - # Try to guess user home directory. By default all users - # directories are located in the same place and are named by - # corresponding usernames. If current user home directory points - # to nonstandard place, this guess is likely wrong. - if os.environ['USERNAME'] != username: - drv, root, parts = self.parse_parts((userhome,)) - if parts[-1] != os.environ['USERNAME']: - raise RuntimeError("Can't determine home directory " - "for %r" % username) - parts[-1] = username - if drv or root: - userhome = drv + root + self.join(parts[1:]) - else: - userhome = self.join(parts) - return userhome - - -class _PosixFlavour(_Flavour): - sep = '/' - altsep = '' - has_drv = False - pathmod = posixpath - - is_supported = (os.name != 'nt') - - def splitroot(self, part, sep=sep): - if part and part[0] == sep: - stripped_part = part.lstrip(sep) - # According to POSIX path resolution: - # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/ - # xbd_chap04.html#tag_04_11 - # "A pathname that begins with two successive slashes may be - # interpreted in an implementation-defined manner, although more - # than two leading slashes shall be treated as a single slash". - if len(part) - len(stripped_part) == 2: - return '', sep * 2, stripped_part - else: - return '', sep, stripped_part - else: - return '', '', part - - def casefold(self, s): - return s - - def casefold_parts(self, parts): - return parts - - def resolve(self, path, strict=False): - sep = self.sep - accessor = path._accessor - seen = {} - - def _resolve(path, rest): - if rest.startswith(sep): - path = '' - - for name in rest.split(sep): - if not name or name == '.': - # current dir - continue - if name == '..': - # parent dir - path, _, _ = path.rpartition(sep) - continue - newpath = path + sep + name - if newpath in seen: - # Already seen this path - path = seen[newpath] - if path is not None: - # use cached value - continue - # The symlink is not resolved, so we must have a symlink - # loop. - raise RuntimeError("Symlink loop from %r" % newpath) - # Resolve the symbolic link - try: - target = accessor.readlink(newpath) - except OSError as e: - if e.errno != EINVAL and strict: - raise - # Not a symlink, or non-strict mode. We just leave the path - # untouched. 
- path = newpath - else: - seen[newpath] = None # not resolved symlink - path = _resolve(path, target) - seen[newpath] = path # resolved symlink - - return path - # NOTE: according to POSIX, getcwd() cannot contain path components - # which are symlinks. - base = '' if path.is_absolute() else os.getcwd() - return _resolve(base, str(path)) or sep - - def is_reserved(self, parts): - return False - - def make_uri(self, path): - # We represent the path using the local filesystem encoding, - # for portability to other applications. - bpath = bytes(path) - return 'file://' + urlquote_from_bytes(bpath) - - def gethomedir(self, username): - if not username: - try: - return os.environ['HOME'] - except KeyError: - import pwd - return pwd.getpwuid(os.getuid()).pw_dir - else: - import pwd - try: - return pwd.getpwnam(username).pw_dir - except KeyError: - raise RuntimeError("Can't determine home directory " - "for %r" % username) - - -_windows_flavour = _WindowsFlavour() -_posix_flavour = _PosixFlavour() - - -class _Accessor: - - """An accessor implements a particular (system-specific or not) way of - accessing paths on the filesystem.""" - - -def _wrap_strfunc(strfunc): - @functools.wraps(strfunc) - def wrapped(pathobj, *args): - return strfunc(str(pathobj), *args) - return staticmethod(wrapped) - - -def _wrap_binary_strfunc(strfunc): - @functools.wraps(strfunc) - def wrapped(pathobjA, pathobjB, *args): - return strfunc(str(pathobjA), str(pathobjB), *args) - return staticmethod(wrapped) - - -class _NormalAccessor(_Accessor): - - stat = _wrap_strfunc(os.stat) - - lstat = _wrap_strfunc(os.lstat) - - open = _wrap_strfunc(os.open) - - listdir = _wrap_strfunc(os.listdir) - - scandir = _wrap_strfunc(os_scandir) - - chmod = _wrap_strfunc(os.chmod) - - if hasattr(os, "lchmod"): - lchmod = _wrap_strfunc(os.lchmod) - else: - def lchmod(self, pathobj, mode): - raise NotImplementedError("lchmod() not available on this system") - - mkdir = _wrap_strfunc(os.mkdir) - - unlink = _wrap_strfunc(os.unlink) - - rmdir = _wrap_strfunc(os.rmdir) - - rename = _wrap_binary_strfunc(os.rename) - - if sys.version_info >= (3, 3): - replace = _wrap_binary_strfunc(os.replace) - - if nt: - if supports_symlinks: - symlink = _wrap_binary_strfunc(os.symlink) - else: - @staticmethod - def symlink(a, b, target_is_directory): - raise NotImplementedError( - "symlink() not available on this system") - else: - # Under POSIX, os.symlink() takes two args - @staticmethod - def symlink(a, b, target_is_directory): - return os.symlink(str(a), str(b)) - - utime = _wrap_strfunc(os.utime) - - # Helper for resolve() - def readlink(self, path): - return os.readlink(path) - - -_normal_accessor = _NormalAccessor() - - -# -# Globbing helpers -# - -def _make_selector(pattern_parts): - pat = pattern_parts[0] - child_parts = pattern_parts[1:] - if pat == '**': - cls = _RecursiveWildcardSelector - elif '**' in pat: - raise ValueError( - "Invalid pattern: '**' can only be an entire path component") - elif _is_wildcard_pattern(pat): - cls = _WildcardSelector - else: - cls = _PreciseSelector - return cls(pat, child_parts) - - -if hasattr(functools, "lru_cache"): - _make_selector = functools.lru_cache()(_make_selector) # type: ignore - - -class _Selector: - - """A selector matches a specific glob pattern part against the children - of a given path.""" - - def __init__(self, child_parts): - self.child_parts = child_parts - if child_parts: - self.successor = _make_selector(child_parts) - self.dironly = True - else: - self.successor = _TerminatingSelector() - 
self.dironly = False - - def select_from(self, parent_path): - """Iterate over all child paths of `parent_path` matched by this - selector. This can contain parent_path itself.""" - path_cls = type(parent_path) - is_dir = path_cls.is_dir - exists = path_cls.exists - scandir = parent_path._accessor.scandir - if not is_dir(parent_path): - return iter([]) - return self._select_from(parent_path, is_dir, exists, scandir) - - -class _TerminatingSelector: - - def _select_from(self, parent_path, is_dir, exists, scandir): - yield parent_path - - -class _PreciseSelector(_Selector): - - def __init__(self, name, child_parts): - self.name = name - _Selector.__init__(self, child_parts) - - def _select_from(self, parent_path, is_dir, exists, scandir): - def try_iter(): - path = parent_path._make_child_relpath(self.name) - if (is_dir if self.dironly else exists)(path): - for p in self.successor._select_from( - path, is_dir, exists, scandir): - yield p - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - -class _WildcardSelector(_Selector): - - def __init__(self, pat, child_parts): - self.pat = re.compile(fnmatch.translate(pat)) - _Selector.__init__(self, child_parts) - - def _select_from(self, parent_path, is_dir, exists, scandir): - def try_iter(): - cf = parent_path._flavour.casefold - entries = list(scandir(parent_path)) - for entry in entries: - if not self.dironly or entry.is_dir(): - name = entry.name - casefolded = cf(name) - if self.pat.match(casefolded): - path = parent_path._make_child_relpath(name) - for p in self.successor._select_from( - path, is_dir, exists, scandir): - yield p - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - -class _RecursiveWildcardSelector(_Selector): - - def __init__(self, pat, child_parts): - _Selector.__init__(self, child_parts) - - def _iterate_directories(self, parent_path, is_dir, scandir): - yield parent_path - - def try_iter(): - entries = list(scandir(parent_path)) - for entry in entries: - entry_is_dir = False - try: - entry_is_dir = entry.is_dir() - except OSError as e: - if not _ignore_error(e): - raise - if entry_is_dir and not entry.is_symlink(): - path = parent_path._make_child_relpath(entry.name) - for p in self._iterate_directories(path, is_dir, scandir): - yield p - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - def _select_from(self, parent_path, is_dir, exists, scandir): - def try_iter(): - yielded = set() - try: - successor_select = self.successor._select_from - for starting_point in self._iterate_directories( - parent_path, is_dir, scandir): - for p in successor_select( - starting_point, is_dir, exists, scandir): - if p not in yielded: - yield p - yielded.add(p) - finally: - yielded.clear() - - def except_iter(exc): - return - yield - - for x in _try_except_permissionerror_iter(try_iter, except_iter): - yield x - - -# -# Public API -# - -class _PathParents(Sequence): - - """This object provides sequence-like access to the logical ancestors - of a path. 
Don't try to construct it yourself.""" - __slots__ = ('_pathcls', '_drv', '_root', '_parts') - - def __init__(self, path): - # We don't store the instance to avoid reference cycles - self._pathcls = type(path) - self._drv = path._drv - self._root = path._root - self._parts = path._parts - - def __len__(self): - if self._drv or self._root: - return len(self._parts) - 1 - else: - return len(self._parts) - - def __getitem__(self, idx): - if idx < 0 or idx >= len(self): - raise IndexError(idx) - return self._pathcls._from_parsed_parts(self._drv, self._root, - self._parts[:-idx - 1]) - - def __repr__(self): - return "<{0}.parents>".format(self._pathcls.__name__) - - -class PurePath(object): - - """PurePath represents a filesystem path and offers operations which - don't imply any actual filesystem I/O. Depending on your system, - instantiating a PurePath will return either a PurePosixPath or a - PureWindowsPath object. You can also instantiate either of these classes - directly, regardless of your system. - """ - __slots__ = ( - '_drv', '_root', '_parts', - '_str', '_hash', '_pparts', '_cached_cparts', - ) - - def __new__(cls, *args): - """Construct a PurePath from one or several strings and or existing - PurePath objects. The strings and path objects are combined so as - to yield a canonicalized path, which is incorporated into the - new PurePath object. - """ - if cls is PurePath: - cls = PureWindowsPath if os.name == 'nt' else PurePosixPath - return cls._from_parts(args) - - def __reduce__(self): - # Using the parts tuple helps share interned path parts - # when pickling related paths. - return (self.__class__, tuple(self._parts)) - - @classmethod - def _parse_args(cls, args): - # This is useful when you don't want to create an instance, just - # canonicalize some constructor arguments. - parts = [] - for a in args: - if isinstance(a, PurePath): - parts += a._parts - else: - if sys.version_info >= (3, 6): - a = os.fspath(a) - else: - # duck typing for older Python versions - if hasattr(a, "__fspath__"): - a = a.__fspath__() - if isinstance(a, str): - # Force-cast str subclasses to str (issue #21127) - parts.append(str(a)) - # also handle unicode for PY2 (six.text_type = unicode) - elif six.PY2 and isinstance(a, six.text_type): - # cast to str using filesystem encoding - # note: in rare circumstances, on Python < 3.2, - # getfilesystemencoding can return None, in that - # case fall back to ascii - parts.append(a.encode( - sys.getfilesystemencoding() or "ascii")) - else: - raise TypeError( - "argument should be a str object or an os.PathLike " - "object returning str, not %r" - % type(a)) - return cls._flavour.parse_parts(parts) - - @classmethod - def _from_parts(cls, args, init=True): - # We need to call _parse_args on the instance, so as to get the - # right flavour. 
- self = object.__new__(cls) - drv, root, parts = self._parse_args(args) - self._drv = drv - self._root = root - self._parts = parts - if init: - self._init() - return self - - @classmethod - def _from_parsed_parts(cls, drv, root, parts, init=True): - self = object.__new__(cls) - self._drv = drv - self._root = root - self._parts = parts - if init: - self._init() - return self - - @classmethod - def _format_parsed_parts(cls, drv, root, parts): - if drv or root: - return drv + root + cls._flavour.join(parts[1:]) - else: - return cls._flavour.join(parts) - - def _init(self): - # Overridden in concrete Path - pass - - def _make_child(self, args): - drv, root, parts = self._parse_args(args) - drv, root, parts = self._flavour.join_parsed_parts( - self._drv, self._root, self._parts, drv, root, parts) - return self._from_parsed_parts(drv, root, parts) - - def __str__(self): - """Return the string representation of the path, suitable for - passing to system calls.""" - try: - return self._str - except AttributeError: - self._str = self._format_parsed_parts(self._drv, self._root, - self._parts) or '.' - return self._str - - def __fspath__(self): - return str(self) - - def as_posix(self): - """Return the string representation of the path with forward (/) - slashes.""" - f = self._flavour - return str(self).replace(f.sep, '/') - - def __bytes__(self): - """Return the bytes representation of the path. This is only - recommended to use under Unix.""" - if sys.version_info < (3, 2): - raise NotImplementedError("needs Python 3.2 or later") - return os.fsencode(str(self)) - - def __repr__(self): - return "{0}({1!r})".format(self.__class__.__name__, self.as_posix()) - - def as_uri(self): - """Return the path as a 'file' URI.""" - if not self.is_absolute(): - raise ValueError("relative path can't be expressed as a file URI") - return self._flavour.make_uri(self) - - @property - def _cparts(self): - # Cached casefolded parts, for hashing and comparison - try: - return self._cached_cparts - except AttributeError: - self._cached_cparts = self._flavour.casefold_parts(self._parts) - return self._cached_cparts - - def __eq__(self, other): - if not isinstance(other, PurePath): - return NotImplemented - return ( - self._cparts == other._cparts - and self._flavour is other._flavour) - - def __ne__(self, other): - return not self == other - - def __hash__(self): - try: - return self._hash - except AttributeError: - self._hash = hash(tuple(self._cparts)) - return self._hash - - def __lt__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts < other._cparts - - def __le__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts <= other._cparts - - def __gt__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts > other._cparts - - def __ge__(self, other): - if (not isinstance(other, PurePath) - or self._flavour is not other._flavour): - return NotImplemented - return self._cparts >= other._cparts - - drive = property(attrgetter('_drv'), - doc="""The drive prefix (letter or UNC path), if any.""") - - root = property(attrgetter('_root'), - doc="""The root of the path, if any.""") - - @property - def anchor(self): - """The concatenation of the drive and root, or ''.""" - anchor = self._drv + self._root - return anchor - - @property - def name(self): - """The final 
path component, if any.""" - parts = self._parts - if len(parts) == (1 if (self._drv or self._root) else 0): - return '' - return parts[-1] - - @property - def suffix(self): - """The final component's last suffix, if any.""" - name = self.name - i = name.rfind('.') - if 0 < i < len(name) - 1: - return name[i:] - else: - return '' - - @property - def suffixes(self): - """A list of the final component's suffixes, if any.""" - name = self.name - if name.endswith('.'): - return [] - name = name.lstrip('.') - return ['.' + suffix for suffix in name.split('.')[1:]] - - @property - def stem(self): - """The final path component, minus its last suffix.""" - name = self.name - i = name.rfind('.') - if 0 < i < len(name) - 1: - return name[:i] - else: - return name - - def with_name(self, name): - """Return a new path with the file name changed.""" - if not self.name: - raise ValueError("%r has an empty name" % (self,)) - drv, root, parts = self._flavour.parse_parts((name,)) - if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep] - or drv or root or len(parts) != 1): - raise ValueError("Invalid name %r" % (name)) - return self._from_parsed_parts(self._drv, self._root, - self._parts[:-1] + parts[-1:]) - - def with_suffix(self, suffix): - """Return a new path with the file suffix changed. If the path - has no suffix, add given suffix. If the given suffix is an empty - string, remove the suffix from the path. - """ - # XXX if suffix is None, should the current suffix be removed? - f = self._flavour - if f.sep in suffix or f.altsep and f.altsep in suffix: - raise ValueError("Invalid suffix %r" % (suffix)) - if suffix and not suffix.startswith('.') or suffix == '.': - raise ValueError("Invalid suffix %r" % (suffix)) - - if (six.PY2 and not isinstance(suffix, str) - and isinstance(suffix, six.text_type)): - # see _parse_args() above - suffix = suffix.encode(sys.getfilesystemencoding() or "ascii") - - name = self.name - if not name: - raise ValueError("%r has an empty name" % (self,)) - old_suffix = self.suffix - if not old_suffix: - name = name + suffix - else: - name = name[:-len(old_suffix)] + suffix - return self._from_parsed_parts(self._drv, self._root, - self._parts[:-1] + [name]) - - def relative_to(self, *other): - """Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - a subpath of the other path), raise ValueError. 
- """ - # For the purpose of this method, drive and root are considered - # separate parts, i.e.: - # Path('c:/').relative_to('c:') gives Path('/') - # Path('c:/').relative_to('/') raise ValueError - if not other: - raise TypeError("need at least one argument") - parts = self._parts - drv = self._drv - root = self._root - if root: - abs_parts = [drv, root] + parts[1:] - else: - abs_parts = parts - to_drv, to_root, to_parts = self._parse_args(other) - if to_root: - to_abs_parts = [to_drv, to_root] + to_parts[1:] - else: - to_abs_parts = to_parts - n = len(to_abs_parts) - cf = self._flavour.casefold_parts - if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): - formatted = self._format_parsed_parts(to_drv, to_root, to_parts) - raise ValueError("{0!r} does not start with {1!r}" - .format(str(self), str(formatted))) - return self._from_parsed_parts('', root if n == 1 else '', - abs_parts[n:]) - - @property - def parts(self): - """An object providing sequence-like access to the - components in the filesystem path.""" - # We cache the tuple to avoid building a new one each time .parts - # is accessed. XXX is this necessary? - try: - return self._pparts - except AttributeError: - self._pparts = tuple(self._parts) - return self._pparts - - def joinpath(self, *args): - """Combine this path with one or several arguments, and return a - new path representing either a subpath (if all arguments are relative - paths) or a totally different path (if one of the arguments is - anchored). - """ - return self._make_child(args) - - def __truediv__(self, key): - return self._make_child((key,)) - - def __rtruediv__(self, key): - return self._from_parts([key] + self._parts) - - if six.PY2: - __div__ = __truediv__ - __rdiv__ = __rtruediv__ - - @property - def parent(self): - """The logical parent of the path.""" - drv = self._drv - root = self._root - parts = self._parts - if len(parts) == 1 and (drv or root): - return self - return self._from_parsed_parts(drv, root, parts[:-1]) - - @property - def parents(self): - """A sequence of this path's logical parents.""" - return _PathParents(self) - - def is_absolute(self): - """True if the path is absolute (has both a root and, if applicable, - a drive).""" - if not self._root: - return False - return not self._flavour.has_drv or bool(self._drv) - - def is_reserved(self): - """Return True if the path contains one of the special names reserved - by the system, if any.""" - return self._flavour.is_reserved(self._parts) - - def match(self, path_pattern): - """ - Return True if this path matches the given pattern. - """ - cf = self._flavour.casefold - path_pattern = cf(path_pattern) - drv, root, pat_parts = self._flavour.parse_parts((path_pattern,)) - if not pat_parts: - raise ValueError("empty pattern") - if drv and drv != cf(self._drv): - return False - if root and root != cf(self._root): - return False - parts = self._cparts - if drv or root: - if len(pat_parts) != len(parts): - return False - pat_parts = pat_parts[1:] - elif len(pat_parts) > len(parts): - return False - for part, pat in zip(reversed(parts), reversed(pat_parts)): - if not fnmatch.fnmatchcase(part, pat): - return False - return True - - -# Can't subclass os.PathLike from PurePath and keep the constructor -# optimizations in PurePath._parse_args(). -if sys.version_info >= (3, 6): - os.PathLike.register(PurePath) - - -class PurePosixPath(PurePath): - _flavour = _posix_flavour - __slots__ = () - - -class PureWindowsPath(PurePath): - """PurePath subclass for Windows systems. 
- - On a Windows system, instantiating a PurePath should return this object. - However, you can also instantiate it directly on any system. - """ - _flavour = _windows_flavour - __slots__ = () - - -# Filesystem-accessing classes - - -class Path(PurePath): - """PurePath subclass that can make system calls. - - Path represents a filesystem path but unlike PurePath, also offers - methods to do system calls on path objects. Depending on your system, - instantiating a Path will return either a PosixPath or a WindowsPath - object. You can also instantiate a PosixPath or WindowsPath directly, - but cannot instantiate a WindowsPath on a POSIX system or vice versa. - """ - __slots__ = ( - '_accessor', - '_closed', - ) - - def __new__(cls, *args, **kwargs): - if cls is Path: - cls = WindowsPath if os.name == 'nt' else PosixPath - self = cls._from_parts(args, init=False) - if not self._flavour.is_supported: - raise NotImplementedError("cannot instantiate %r on your system" - % (cls.__name__,)) - self._init() - return self - - def _init(self, - # Private non-constructor arguments - template=None, - ): - self._closed = False - if template is not None: - self._accessor = template._accessor - else: - self._accessor = _normal_accessor - - def _make_child_relpath(self, part): - # This is an optimization used for dir walking. `part` must be - # a single part relative to this path. - parts = self._parts + [part] - return self._from_parsed_parts(self._drv, self._root, parts) - - def __enter__(self): - if self._closed: - self._raise_closed() - return self - - def __exit__(self, t, v, tb): - self._closed = True - - def _raise_closed(self): - raise ValueError("I/O operation on closed path") - - def _opener(self, name, flags, mode=0o666): - # A stub for the opener argument to built-in open() - return self._accessor.open(self, flags, mode) - - def _raw_open(self, flags, mode=0o777): - """ - Open the file pointed by this path and return a file descriptor, - as os.open() does. - """ - if self._closed: - self._raise_closed() - return self._accessor.open(self, flags, mode) - - # Public API - - @classmethod - def cwd(cls): - """Return a new path pointing to the current working directory - (as returned by os.getcwd()). - """ - return cls(os.getcwd()) - - @classmethod - def home(cls): - """Return a new path pointing to the user's home directory (as - returned by os.path.expanduser('~')). - """ - return cls(cls()._flavour.gethomedir(None)) - - def samefile(self, other_path): - """Return whether other_path is the same or not as this file - (as returned by os.path.samefile()). - """ - if hasattr(os.path, "samestat"): - st = self.stat() - try: - other_st = other_path.stat() - except AttributeError: - other_st = os.stat(other_path) - return os.path.samestat(st, other_st) - else: - filename1 = six.text_type(self) - filename2 = six.text_type(other_path) - st1 = _win32_get_unique_path_id(filename1) - st2 = _win32_get_unique_path_id(filename2) - return st1 == st2 - - def iterdir(self): - """Iterate over the files in this directory. Does not yield any - result for the special paths '.' and '..'. - """ - if self._closed: - self._raise_closed() - for name in self._accessor.listdir(self): - if name in ('.', '..'): - # Yielding a path object for these makes little sense - continue - yield self._make_child_relpath(name) - if self._closed: - self._raise_closed() - - def glob(self, pattern): - """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. 
- """ - if not pattern: - raise ValueError("Unacceptable pattern: {0!r}".format(pattern)) - pattern = self._flavour.casefold(pattern) - drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) - if drv or root: - raise NotImplementedError("Non-relative patterns are unsupported") - selector = _make_selector(tuple(pattern_parts)) - for p in selector.select_from(self): - yield p - - def rglob(self, pattern): - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - pattern = self._flavour.casefold(pattern) - drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) - if drv or root: - raise NotImplementedError("Non-relative patterns are unsupported") - selector = _make_selector(("**",) + tuple(pattern_parts)) - for p in selector.select_from(self): - yield p - - def absolute(self): - """Return an absolute version of this path. This function works - even if the path doesn't point to anything. - - No normalization is done, i.e. all '.' and '..' will be kept along. - Use resolve() to get the canonical path to a file. - """ - # XXX untested yet! - if self._closed: - self._raise_closed() - if self.is_absolute(): - return self - # FIXME this must defer to the specific flavour (and, under Windows, - # use nt._getfullpathname()) - obj = self._from_parts([os.getcwd()] + self._parts, init=False) - obj._init(template=self) - return obj - - def resolve(self, strict=False): - """ - Make the path absolute, resolving all symlinks on the way and also - normalizing it (for example turning slashes into backslashes under - Windows). - """ - if self._closed: - self._raise_closed() - s = self._flavour.resolve(self, strict=strict) - if s is None: - # No symlink resolution => for consistency, raise an error if - # the path is forbidden - # but not raise error if file does not exist (see issue #54). - - def _try_func(): - self.stat() - - def _exc_func(exc): - pass - - _try_except_filenotfounderror(_try_func, _exc_func) - s = str(self.absolute()) - else: - # ensure s is a string (normpath requires this on older python) - s = str(s) - # Now we have no symlinks in the path, it's safe to normalize it. - normed = self._flavour.pathmod.normpath(s) - obj = self._from_parts((normed,), init=False) - obj._init(template=self) - return obj - - def stat(self): - """ - Return the result of the stat() system call on this path, like - os.stat() does. - """ - return self._accessor.stat(self) - - def owner(self): - """ - Return the login name of the file owner. - """ - import pwd - return pwd.getpwuid(self.stat().st_uid).pw_name - - def group(self): - """ - Return the group name of the file gid. - """ - import grp - return grp.getgrgid(self.stat().st_gid).gr_name - - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): - """ - Open the file pointed by this path and return a file object, as - the built-in open() function does. - """ - if self._closed: - self._raise_closed() - if sys.version_info >= (3, 3): - return io.open( - str(self), mode, buffering, encoding, errors, newline, - opener=self._opener) - else: - return io.open(str(self), mode, buffering, - encoding, errors, newline) - - def read_bytes(self): - """ - Open the file in bytes mode, read it, and close the file. - """ - with self.open(mode='rb') as f: - return f.read() - - def read_text(self, encoding=None, errors=None): - """ - Open the file in text mode, read it, and close the file. 
- """ - with self.open(mode='r', encoding=encoding, errors=errors) as f: - return f.read() - - def write_bytes(self, data): - """ - Open the file in bytes mode, write to it, and close the file. - """ - if not isinstance(data, six.binary_type): - raise TypeError( - 'data must be %s, not %s' % - (six.binary_type.__name__, data.__class__.__name__)) - with self.open(mode='wb') as f: - return f.write(data) - - def write_text(self, data, encoding=None, errors=None): - """ - Open the file in text mode, write to it, and close the file. - """ - if not isinstance(data, six.text_type): - raise TypeError( - 'data must be %s, not %s' % - (six.text_type.__name__, data.__class__.__name__)) - with self.open(mode='w', encoding=encoding, errors=errors) as f: - return f.write(data) - - def touch(self, mode=0o666, exist_ok=True): - """ - Create this file with the given access mode, if it doesn't exist. - """ - if self._closed: - self._raise_closed() - if exist_ok: - # First try to bump modification time - # Implementation note: GNU touch uses the UTIME_NOW option of - # the utimensat() / futimens() functions. - try: - self._accessor.utime(self, None) - except OSError: - # Avoid exception chaining - pass - else: - return - flags = os.O_CREAT | os.O_WRONLY - if not exist_ok: - flags |= os.O_EXCL - fd = self._raw_open(flags, mode) - os.close(fd) - - def mkdir(self, mode=0o777, parents=False, exist_ok=False): - """ - Create a new directory at this given path. - """ - if self._closed: - self._raise_closed() - - def _try_func(): - self._accessor.mkdir(self, mode) - - def _exc_func(exc): - if not parents or self.parent == self: - raise exc - self.parent.mkdir(parents=True, exist_ok=True) - self.mkdir(mode, parents=False, exist_ok=exist_ok) - - try: - _try_except_filenotfounderror(_try_func, _exc_func) - except OSError: - # Cannot rely on checking for EEXIST, since the operating system - # could give priority to other errors like EACCES or EROFS - if not exist_ok or not self.is_dir(): - raise - - def chmod(self, mode): - """ - Change the permissions of the path, like os.chmod(). - """ - if self._closed: - self._raise_closed() - self._accessor.chmod(self, mode) - - def lchmod(self, mode): - """ - Like chmod(), except if the path points to a symlink, the symlink's - permissions are changed, rather than its target's. - """ - if self._closed: - self._raise_closed() - self._accessor.lchmod(self, mode) - - def unlink(self): - """ - Remove this file or link. - If the path is a directory, use rmdir() instead. - """ - if self._closed: - self._raise_closed() - self._accessor.unlink(self) - - def rmdir(self): - """ - Remove this directory. The directory must be empty. - """ - if self._closed: - self._raise_closed() - self._accessor.rmdir(self) - - def lstat(self): - """ - Like stat(), except if the path points to a symlink, the symlink's - status information is returned, rather than its target's. - """ - if self._closed: - self._raise_closed() - return self._accessor.lstat(self) - - def rename(self, target): - """ - Rename this path to the given path. - """ - if self._closed: - self._raise_closed() - self._accessor.rename(self, target) - - def replace(self, target): - """ - Rename this path to the given path, clobbering the existing - destination if it exists. 
- """ - if sys.version_info < (3, 3): - raise NotImplementedError("replace() is only available " - "with Python 3.3 and later") - if self._closed: - self._raise_closed() - self._accessor.replace(self, target) - - def symlink_to(self, target, target_is_directory=False): - """ - Make this path a symlink pointing to the given path. - Note the order of arguments (self, target) is the reverse of - os.symlink's. - """ - if self._closed: - self._raise_closed() - self._accessor.symlink(target, self, target_is_directory) - - # Convenience functions for querying the stat results - - def exists(self): - """ - Whether this path exists. - """ - try: - self.stat() - except OSError as e: - if not _ignore_error(e): - raise - return False - except ValueError: - # Non-encodable path - return False - return True - - def is_dir(self): - """ - Whether this path is a directory. - """ - try: - return S_ISDIR(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_file(self): - """ - Whether this path is a regular file (also True for symlinks pointing - to regular files). - """ - try: - return S_ISREG(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_mount(self): - """ - Check if this path is a POSIX mount point - """ - # Need to exist and be a dir - if not self.exists() or not self.is_dir(): - return False - - parent = Path(self.parent) - try: - parent_dev = parent.stat().st_dev - except OSError: - return False - - dev = self.stat().st_dev - if dev != parent_dev: - return True - ino = self.stat().st_ino - parent_ino = parent.stat().st_ino - return ino == parent_ino - - def is_symlink(self): - """ - Whether this path is a symbolic link. - """ - try: - return S_ISLNK(self.lstat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist - return False - except ValueError: - # Non-encodable path - return False - - def is_block_device(self): - """ - Whether this path is a block device. - """ - try: - return S_ISBLK(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_char_device(self): - """ - Whether this path is a character device. - """ - try: - return S_ISCHR(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_fifo(self): - """ - Whether this path is a FIFO. - """ - try: - return S_ISFIFO(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def is_socket(self): - """ - Whether this path is a socket. 
- """ - try: - return S_ISSOCK(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see https://bitbucket.org/pitrou/pathlib/issue/12/) - return False - except ValueError: - # Non-encodable path - return False - - def expanduser(self): - """ Return a new path with expanded ~ and ~user constructs - (as returned by os.path.expanduser) - """ - if (not (self._drv or self._root) - and self._parts and self._parts[0][:1] == '~'): - homedir = self._flavour.gethomedir(self._parts[0][1:]) - return self._from_parts([homedir] + self._parts[1:]) - - return self - - -class PosixPath(Path, PurePosixPath): - """Path subclass for non-Windows systems. - - On a POSIX system, instantiating a Path should return this object. - """ - __slots__ = () - - -class WindowsPath(Path, PureWindowsPath): - """Path subclass for Windows systems. - - On a Windows system, instantiating a Path should return this object. - """ - __slots__ = () - - def owner(self): - raise NotImplementedError("Path.owner() is unsupported on this system") - - def group(self): - raise NotImplementedError("Path.group() is unsupported on this system") - - def is_mount(self): - raise NotImplementedError( - "Path.is_mount() is unsupported on this system") +# Copyright (c) 2014-2017 Matthias C. M. Troffaes +# Copyright (c) 2012-2014 Antoine Pitrou and contributors +# Distributed under the terms of the MIT License. + +import ctypes +import fnmatch +import functools +import io +import ntpath +import os +import posixpath +import re +import six +import sys + +from errno import EINVAL, ENOENT, ENOTDIR, EBADF +from errno import EEXIST, EPERM, EACCES +from operator import attrgetter +from stat import ( + S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO) + +try: + from collections.abc import Sequence +except ImportError: + from collections import Sequence + +try: + from urllib import quote as urlquote_from_bytes +except ImportError: + from urllib.parse import quote_from_bytes as urlquote_from_bytes + + +try: + intern = intern +except NameError: + intern = sys.intern + +supports_symlinks = True +if os.name == 'nt': + import nt + if sys.getwindowsversion()[:2] >= (6, 0) and sys.version_info >= (3, 2): + from nt import _getfinalpathname + else: + supports_symlinks = False + _getfinalpathname = None +else: + nt = None + +try: + from os import scandir as os_scandir +except ImportError: + from scandir import scandir as os_scandir + +__all__ = [ + "PurePath", "PurePosixPath", "PureWindowsPath", + "Path", "PosixPath", "WindowsPath", + ] + +# +# Internals +# + +# EBADF - guard agains macOS `stat` throwing EBADF +_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF) + +_IGNORED_WINERRORS = ( + 21, # ERROR_NOT_READY - drive exists but is not accessible +) + + +def _ignore_error(exception): + return (getattr(exception, 'errno', None) in _IGNORED_ERROS or + getattr(exception, 'winerror', None) in _IGNORED_WINERRORS) + + +def _py2_fsencode(parts): + # py2 => minimal unicode support + assert six.PY2 + return [part.encode('ascii') if isinstance(part, six.text_type) + else part for part in parts] + + +def _try_except_fileexistserror(try_func, except_func, else_func=None): + if sys.version_info >= (3, 3): + try: + try_func() + except FileExistsError as exc: + except_func(exc) + else: + if else_func is not None: + else_func() + else: + try: + try_func() + except EnvironmentError as exc: + if exc.errno != EEXIST: + raise + else: + except_func(exc) + else: + if else_func is not None: + else_func() + + 
+def _try_except_filenotfounderror(try_func, except_func): + if sys.version_info >= (3, 3): + try: + try_func() + except FileNotFoundError as exc: + except_func(exc) + elif os.name != 'nt': + try: + try_func() + except EnvironmentError as exc: + if exc.errno != ENOENT: + raise + else: + except_func(exc) + else: + try: + try_func() + except WindowsError as exc: + # errno contains winerror + # 2 = file not found + # 3 = path not found + if exc.errno not in (2, 3): + raise + else: + except_func(exc) + except EnvironmentError as exc: + if exc.errno != ENOENT: + raise + else: + except_func(exc) + + +def _try_except_permissionerror_iter(try_iter, except_iter): + if sys.version_info >= (3, 3): + try: + for x in try_iter(): + yield x + except PermissionError as exc: + for x in except_iter(exc): + yield x + else: + try: + for x in try_iter(): + yield x + except EnvironmentError as exc: + if exc.errno not in (EPERM, EACCES): + raise + else: + for x in except_iter(exc): + yield x + + +def _win32_get_unique_path_id(path): + # get file information, needed for samefile on older Python versions + # see http://timgolden.me.uk/python/win32_how_do_i/ + # see_if_two_files_are_the_same_file.html + from ctypes import POINTER, Structure, WinError + from ctypes.wintypes import DWORD, HANDLE, BOOL + + class FILETIME(Structure): + _fields_ = [("datetime_lo", DWORD), + ("datetime_hi", DWORD), + ] + + class BY_HANDLE_FILE_INFORMATION(Structure): + _fields_ = [("attributes", DWORD), + ("created_at", FILETIME), + ("accessed_at", FILETIME), + ("written_at", FILETIME), + ("volume", DWORD), + ("file_hi", DWORD), + ("file_lo", DWORD), + ("n_links", DWORD), + ("index_hi", DWORD), + ("index_lo", DWORD), + ] + + CreateFile = ctypes.windll.kernel32.CreateFileW + CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, + DWORD, DWORD, HANDLE] + CreateFile.restype = HANDLE + GetFileInformationByHandle = ( + ctypes.windll.kernel32.GetFileInformationByHandle) + GetFileInformationByHandle.argtypes = [ + HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)] + GetFileInformationByHandle.restype = BOOL + CloseHandle = ctypes.windll.kernel32.CloseHandle + CloseHandle.argtypes = [HANDLE] + CloseHandle.restype = BOOL + GENERIC_READ = 0x80000000 + FILE_SHARE_READ = 0x00000001 + FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 + OPEN_EXISTING = 3 + if os.path.isdir(path): + flags = FILE_FLAG_BACKUP_SEMANTICS + else: + flags = 0 + hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ, + None, OPEN_EXISTING, flags, None) + if hfile == 0xffffffff: + if sys.version_info >= (3, 3): + raise FileNotFoundError(path) + else: + exc = OSError("file not found: path") + exc.errno = ENOENT + raise exc + info = BY_HANDLE_FILE_INFORMATION() + success = GetFileInformationByHandle(hfile, info) + CloseHandle(hfile) + if success == 0: + raise WinError() + return info.volume, info.index_hi, info.index_lo + + +def _is_wildcard_pattern(pat): + # Whether this pattern needs actual matching using fnmatch, or can + # be looked up directly as a file. + return "*" in pat or "?" 
in pat or "[" in pat + + +class _Flavour(object): + + """A flavour implements a particular (platform-specific) set of path + semantics.""" + + def __init__(self): + self.join = self.sep.join + + def parse_parts(self, parts): + if six.PY2: + parts = _py2_fsencode(parts) + parsed = [] + sep = self.sep + altsep = self.altsep + drv = root = '' + it = reversed(parts) + for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) + drv, root, rel = self.splitroot(part) + if sep in rel: + for x in reversed(rel.split(sep)): + if x and x != '.': + parsed.append(intern(x)) + else: + if rel and rel != '.': + parsed.append(intern(rel)) + if drv or root: + if not drv: + # If no drive is present, try to find one in the previous + # parts. This makes the result of parsing e.g. + # ("C:", "/", "a") reasonably intuitive. + for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) + drv = self.splitroot(part)[0] + if drv: + break + break + if drv or root: + parsed.append(drv + root) + parsed.reverse() + return drv, root, parsed + + def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2): + """ + Join the two paths represented by the respective + (drive, root, parts) tuples. Return a new (drive, root, parts) tuple. + """ + if root2: + if not drv2 and drv: + return drv, root2, [drv + root2] + parts2[1:] + elif drv2: + if drv2 == drv or self.casefold(drv2) == self.casefold(drv): + # Same drive => second path is relative to the first + return drv, root, parts + parts2[1:] + else: + # Second path is non-anchored (common case) + return drv, root, parts + parts2 + return drv2, root2, parts2 + + +class _WindowsFlavour(_Flavour): + # Reference for Windows paths can be found at + # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx + + sep = '\\' + altsep = '/' + has_drv = True + pathmod = ntpath + + is_supported = (os.name == 'nt') + + drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') + ext_namespace_prefix = '\\\\?\\' + + reserved_names = ( + set(['CON', 'PRN', 'AUX', 'NUL']) | + set(['COM%d' % i for i in range(1, 10)]) | + set(['LPT%d' % i for i in range(1, 10)]) + ) + + # Interesting findings about extended paths: + # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported + # but '\\?\c:/a' is not + # - extended paths are always absolute; "relative" extended paths will + # fail. + + def splitroot(self, part, sep=sep): + first = part[0:1] + second = part[1:2] + if (second == sep and first == sep): + # XXX extended paths should also disable the collapsing of "." + # components (according to MSDN docs). + prefix, part = self._split_extended_path(part) + first = part[0:1] + second = part[1:2] + else: + prefix = '' + third = part[2:3] + if (second == sep and first == sep and third != sep): + # is a UNC path: + # vvvvvvvvvvvvvvvvvvvvv root + # \\machine\mountpoint\directory\etc\... 
+ # directory ^^^^^^^^^^^^^^ + index = part.find(sep, 2) + if index != -1: + index2 = part.find(sep, index + 1) + # a UNC path can't have two slashes in a row + # (after the initial two) + if index2 != index + 1: + if index2 == -1: + index2 = len(part) + if prefix: + return prefix + part[1:index2], sep, part[index2 + 1:] + else: + return part[:index2], sep, part[index2 + 1:] + drv = root = '' + if second == ':' and first in self.drive_letters: + drv = part[:2] + part = part[2:] + first = third + if first == sep: + root = first + part = part.lstrip(sep) + return prefix + drv, root, part + + def casefold(self, s): + return s.lower() + + def casefold_parts(self, parts): + return [p.lower() for p in parts] + + def resolve(self, path, strict=False): + s = str(path) + if not s: + return os.getcwd() + previous_s = None + if _getfinalpathname is not None: + if strict: + return self._ext_to_normal(_getfinalpathname(s)) + else: + # End of the path after the first one not found + tail_parts = [] + + def _try_func(): + result[0] = self._ext_to_normal(_getfinalpathname(s)) + # if there was no exception, set flag to 0 + result[1] = 0 + + def _exc_func(exc): + pass + + while True: + result = [None, 1] + _try_except_filenotfounderror(_try_func, _exc_func) + if result[1] == 1: # file not found exception raised + previous_s = s + s, tail = os.path.split(s) + tail_parts.append(tail) + if previous_s == s: + return path + else: + s = result[0] + return os.path.join(s, *reversed(tail_parts)) + # Means fallback on absolute + return None + + def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix): + prefix = '' + if s.startswith(ext_prefix): + prefix = s[:4] + s = s[4:] + if s.startswith('UNC\\'): + prefix += s[:3] + s = '\\' + s[3:] + return prefix, s + + def _ext_to_normal(self, s): + # Turn back an extended path into a normal DOS-like path + return self._split_extended_path(s)[1] + + def is_reserved(self, parts): + # NOTE: the rules for reserved names seem somewhat complicated + # (e.g. r"..\NUL" is reserved but not r"foo\NUL"). + # We err on the side of caution and return True for paths which are + # not considered reserved by Windows. + if not parts: + return False + if parts[0].startswith('\\\\'): + # UNC paths are never reserved + return False + return parts[-1].partition('.')[0].upper() in self.reserved_names + + def make_uri(self, path): + # Under Windows, file URIs use the UTF-8 encoding. + drive = path.drive + if len(drive) == 2 and drive[1] == ':': + # It's a path on a local drive => 'file:///c:/a/b' + rest = path.as_posix()[2:].lstrip('/') + return 'file:///%s/%s' % ( + drive, urlquote_from_bytes(rest.encode('utf-8'))) + else: + # It's a path on a network drive => 'file://host/share/a/b' + return 'file:' + urlquote_from_bytes( + path.as_posix().encode('utf-8')) + + def gethomedir(self, username): + if 'HOME' in os.environ: + userhome = os.environ['HOME'] + elif 'USERPROFILE' in os.environ: + userhome = os.environ['USERPROFILE'] + elif 'HOMEPATH' in os.environ: + try: + drv = os.environ['HOMEDRIVE'] + except KeyError: + drv = '' + userhome = drv + os.environ['HOMEPATH'] + else: + raise RuntimeError("Can't determine home directory") + + if username: + # Try to guess user home directory. By default all users + # directories are located in the same place and are named by + # corresponding usernames. If current user home directory points + # to nonstandard place, this guess is likely wrong. 
+ if os.environ['USERNAME'] != username: + drv, root, parts = self.parse_parts((userhome,)) + if parts[-1] != os.environ['USERNAME']: + raise RuntimeError("Can't determine home directory " + "for %r" % username) + parts[-1] = username + if drv or root: + userhome = drv + root + self.join(parts[1:]) + else: + userhome = self.join(parts) + return userhome + + +class _PosixFlavour(_Flavour): + sep = '/' + altsep = '' + has_drv = False + pathmod = posixpath + + is_supported = (os.name != 'nt') + + def splitroot(self, part, sep=sep): + if part and part[0] == sep: + stripped_part = part.lstrip(sep) + # According to POSIX path resolution: + # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/ + # xbd_chap04.html#tag_04_11 + # "A pathname that begins with two successive slashes may be + # interpreted in an implementation-defined manner, although more + # than two leading slashes shall be treated as a single slash". + if len(part) - len(stripped_part) == 2: + return '', sep * 2, stripped_part + else: + return '', sep, stripped_part + else: + return '', '', part + + def casefold(self, s): + return s + + def casefold_parts(self, parts): + return parts + + def resolve(self, path, strict=False): + sep = self.sep + accessor = path._accessor + seen = {} + + def _resolve(path, rest): + if rest.startswith(sep): + path = '' + + for name in rest.split(sep): + if not name or name == '.': + # current dir + continue + if name == '..': + # parent dir + path, _, _ = path.rpartition(sep) + continue + newpath = path + sep + name + if newpath in seen: + # Already seen this path + path = seen[newpath] + if path is not None: + # use cached value + continue + # The symlink is not resolved, so we must have a symlink + # loop. + raise RuntimeError("Symlink loop from %r" % newpath) + # Resolve the symbolic link + try: + target = accessor.readlink(newpath) + except OSError as e: + if e.errno != EINVAL and strict: + raise + # Not a symlink, or non-strict mode. We just leave the path + # untouched. + path = newpath + else: + seen[newpath] = None # not resolved symlink + path = _resolve(path, target) + seen[newpath] = path # resolved symlink + + return path + # NOTE: according to POSIX, getcwd() cannot contain path components + # which are symlinks. + base = '' if path.is_absolute() else os.getcwd() + return _resolve(base, str(path)) or sep + + def is_reserved(self, parts): + return False + + def make_uri(self, path): + # We represent the path using the local filesystem encoding, + # for portability to other applications. 
+ bpath = bytes(path) + return 'file://' + urlquote_from_bytes(bpath) + + def gethomedir(self, username): + if not username: + try: + return os.environ['HOME'] + except KeyError: + import pwd + return pwd.getpwuid(os.getuid()).pw_dir + else: + import pwd + try: + return pwd.getpwnam(username).pw_dir + except KeyError: + raise RuntimeError("Can't determine home directory " + "for %r" % username) + + +_windows_flavour = _WindowsFlavour() +_posix_flavour = _PosixFlavour() + + +class _Accessor: + + """An accessor implements a particular (system-specific or not) way of + accessing paths on the filesystem.""" + + +class _NormalAccessor(_Accessor): + + def _wrap_strfunc(strfunc): + @functools.wraps(strfunc) + def wrapped(pathobj, *args): + return strfunc(str(pathobj), *args) + return staticmethod(wrapped) + + def _wrap_binary_strfunc(strfunc): + @functools.wraps(strfunc) + def wrapped(pathobjA, pathobjB, *args): + return strfunc(str(pathobjA), str(pathobjB), *args) + return staticmethod(wrapped) + + stat = _wrap_strfunc(os.stat) + + lstat = _wrap_strfunc(os.lstat) + + open = _wrap_strfunc(os.open) + + listdir = _wrap_strfunc(os.listdir) + + scandir = _wrap_strfunc(os_scandir) + + chmod = _wrap_strfunc(os.chmod) + + if hasattr(os, "lchmod"): + lchmod = _wrap_strfunc(os.lchmod) + else: + def lchmod(self, pathobj, mode): + raise NotImplementedError("lchmod() not available on this system") + + mkdir = _wrap_strfunc(os.mkdir) + + unlink = _wrap_strfunc(os.unlink) + + rmdir = _wrap_strfunc(os.rmdir) + + rename = _wrap_binary_strfunc(os.rename) + + if sys.version_info >= (3, 3): + replace = _wrap_binary_strfunc(os.replace) + + if nt: + if supports_symlinks: + symlink = _wrap_binary_strfunc(os.symlink) + else: + def symlink(a, b, target_is_directory): + raise NotImplementedError( + "symlink() not available on this system") + else: + # Under POSIX, os.symlink() takes two args + @staticmethod + def symlink(a, b, target_is_directory): + return os.symlink(str(a), str(b)) + + utime = _wrap_strfunc(os.utime) + + # Helper for resolve() + def readlink(self, path): + return os.readlink(path) + + +_normal_accessor = _NormalAccessor() + + +# +# Globbing helpers +# + +def _make_selector(pattern_parts): + pat = pattern_parts[0] + child_parts = pattern_parts[1:] + if pat == '**': + cls = _RecursiveWildcardSelector + elif '**' in pat: + raise ValueError( + "Invalid pattern: '**' can only be an entire path component") + elif _is_wildcard_pattern(pat): + cls = _WildcardSelector + else: + cls = _PreciseSelector + return cls(pat, child_parts) + + +if hasattr(functools, "lru_cache"): + _make_selector = functools.lru_cache()(_make_selector) + + +class _Selector: + + """A selector matches a specific glob pattern part against the children + of a given path.""" + + def __init__(self, child_parts): + self.child_parts = child_parts + if child_parts: + self.successor = _make_selector(child_parts) + self.dironly = True + else: + self.successor = _TerminatingSelector() + self.dironly = False + + def select_from(self, parent_path): + """Iterate over all child paths of `parent_path` matched by this + selector. 
This can contain parent_path itself.""" + path_cls = type(parent_path) + is_dir = path_cls.is_dir + exists = path_cls.exists + scandir = parent_path._accessor.scandir + if not is_dir(parent_path): + return iter([]) + return self._select_from(parent_path, is_dir, exists, scandir) + + +class _TerminatingSelector: + + def _select_from(self, parent_path, is_dir, exists, scandir): + yield parent_path + + +class _PreciseSelector(_Selector): + + def __init__(self, name, child_parts): + self.name = name + _Selector.__init__(self, child_parts) + + def _select_from(self, parent_path, is_dir, exists, scandir): + def try_iter(): + path = parent_path._make_child_relpath(self.name) + if (is_dir if self.dironly else exists)(path): + for p in self.successor._select_from( + path, is_dir, exists, scandir): + yield p + + def except_iter(exc): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + +class _WildcardSelector(_Selector): + + def __init__(self, pat, child_parts): + self.pat = re.compile(fnmatch.translate(pat)) + _Selector.__init__(self, child_parts) + + def _select_from(self, parent_path, is_dir, exists, scandir): + def try_iter(): + cf = parent_path._flavour.casefold + entries = list(scandir(parent_path)) + for entry in entries: + if not self.dironly or entry.is_dir(): + name = entry.name + casefolded = cf(name) + if self.pat.match(casefolded): + path = parent_path._make_child_relpath(name) + for p in self.successor._select_from( + path, is_dir, exists, scandir): + yield p + + def except_iter(exc): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + +class _RecursiveWildcardSelector(_Selector): + + def __init__(self, pat, child_parts): + _Selector.__init__(self, child_parts) + + def _iterate_directories(self, parent_path, is_dir, scandir): + yield parent_path + + def try_iter(): + entries = list(scandir(parent_path)) + for entry in entries: + entry_is_dir = False + try: + entry_is_dir = entry.is_dir() + except OSError as e: + if not _ignore_error(e): + raise + if entry_is_dir and not entry.is_symlink(): + path = parent_path._make_child_relpath(entry.name) + for p in self._iterate_directories(path, is_dir, scandir): + yield p + + def except_iter(exc): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + def _select_from(self, parent_path, is_dir, exists, scandir): + def try_iter(): + yielded = set() + try: + successor_select = self.successor._select_from + for starting_point in self._iterate_directories( + parent_path, is_dir, scandir): + for p in successor_select( + starting_point, is_dir, exists, scandir): + if p not in yielded: + yield p + yielded.add(p) + finally: + yielded.clear() + + def except_iter(exc): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + +# +# Public API +# + +class _PathParents(Sequence): + + """This object provides sequence-like access to the logical ancestors + of a path. 
Don't try to construct it yourself.""" + __slots__ = ('_pathcls', '_drv', '_root', '_parts') + + def __init__(self, path): + # We don't store the instance to avoid reference cycles + self._pathcls = type(path) + self._drv = path._drv + self._root = path._root + self._parts = path._parts + + def __len__(self): + if self._drv or self._root: + return len(self._parts) - 1 + else: + return len(self._parts) + + def __getitem__(self, idx): + if idx < 0 or idx >= len(self): + raise IndexError(idx) + return self._pathcls._from_parsed_parts(self._drv, self._root, + self._parts[:-idx - 1]) + + def __repr__(self): + return "<{0}.parents>".format(self._pathcls.__name__) + + +class PurePath(object): + + """PurePath represents a filesystem path and offers operations which + don't imply any actual filesystem I/O. Depending on your system, + instantiating a PurePath will return either a PurePosixPath or a + PureWindowsPath object. You can also instantiate either of these classes + directly, regardless of your system. + """ + __slots__ = ( + '_drv', '_root', '_parts', + '_str', '_hash', '_pparts', '_cached_cparts', + ) + + def __new__(cls, *args): + """Construct a PurePath from one or several strings and or existing + PurePath objects. The strings and path objects are combined so as + to yield a canonicalized path, which is incorporated into the + new PurePath object. + """ + if cls is PurePath: + cls = PureWindowsPath if os.name == 'nt' else PurePosixPath + return cls._from_parts(args) + + def __reduce__(self): + # Using the parts tuple helps share interned path parts + # when pickling related paths. + return (self.__class__, tuple(self._parts)) + + @classmethod + def _parse_args(cls, args): + # This is useful when you don't want to create an instance, just + # canonicalize some constructor arguments. + parts = [] + for a in args: + if isinstance(a, PurePath): + parts += a._parts + else: + if sys.version_info >= (3, 6): + a = os.fspath(a) + else: + # duck typing for older Python versions + if hasattr(a, "__fspath__"): + a = a.__fspath__() + if isinstance(a, str): + # Force-cast str subclasses to str (issue #21127) + parts.append(str(a)) + # also handle unicode for PY2 (six.text_type = unicode) + elif six.PY2 and isinstance(a, six.text_type): + # cast to str using filesystem encoding + # note: in rare circumstances, on Python < 3.2, + # getfilesystemencoding can return None, in that + # case fall back to ascii + parts.append(a.encode( + sys.getfilesystemencoding() or "ascii")) + else: + raise TypeError( + "argument should be a str object or an os.PathLike " + "object returning str, not %r" + % type(a)) + return cls._flavour.parse_parts(parts) + + @classmethod + def _from_parts(cls, args, init=True): + # We need to call _parse_args on the instance, so as to get the + # right flavour. 
+ self = object.__new__(cls) + drv, root, parts = self._parse_args(args) + self._drv = drv + self._root = root + self._parts = parts + if init: + self._init() + return self + + @classmethod + def _from_parsed_parts(cls, drv, root, parts, init=True): + self = object.__new__(cls) + self._drv = drv + self._root = root + self._parts = parts + if init: + self._init() + return self + + @classmethod + def _format_parsed_parts(cls, drv, root, parts): + if drv or root: + return drv + root + cls._flavour.join(parts[1:]) + else: + return cls._flavour.join(parts) + + def _init(self): + # Overridden in concrete Path + pass + + def _make_child(self, args): + drv, root, parts = self._parse_args(args) + drv, root, parts = self._flavour.join_parsed_parts( + self._drv, self._root, self._parts, drv, root, parts) + return self._from_parsed_parts(drv, root, parts) + + def __str__(self): + """Return the string representation of the path, suitable for + passing to system calls.""" + try: + return self._str + except AttributeError: + self._str = self._format_parsed_parts(self._drv, self._root, + self._parts) or '.' + return self._str + + def __fspath__(self): + return str(self) + + def as_posix(self): + """Return the string representation of the path with forward (/) + slashes.""" + f = self._flavour + return str(self).replace(f.sep, '/') + + def __bytes__(self): + """Return the bytes representation of the path. This is only + recommended to use under Unix.""" + if sys.version_info < (3, 2): + raise NotImplementedError("needs Python 3.2 or later") + return os.fsencode(str(self)) + + def __repr__(self): + return "{0}({1!r})".format(self.__class__.__name__, self.as_posix()) + + def as_uri(self): + """Return the path as a 'file' URI.""" + if not self.is_absolute(): + raise ValueError("relative path can't be expressed as a file URI") + return self._flavour.make_uri(self) + + @property + def _cparts(self): + # Cached casefolded parts, for hashing and comparison + try: + return self._cached_cparts + except AttributeError: + self._cached_cparts = self._flavour.casefold_parts(self._parts) + return self._cached_cparts + + def __eq__(self, other): + if not isinstance(other, PurePath): + return NotImplemented + return ( + self._cparts == other._cparts + and self._flavour is other._flavour) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(tuple(self._cparts)) + return self._hash + + def __lt__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts < other._cparts + + def __le__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts <= other._cparts + + def __gt__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts > other._cparts + + def __ge__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts >= other._cparts + + drive = property(attrgetter('_drv'), + doc="""The drive prefix (letter or UNC path), if any.""") + + root = property(attrgetter('_root'), + doc="""The root of the path, if any.""") + + @property + def anchor(self): + """The concatenation of the drive and root, or ''.""" + anchor = self._drv + self._root + return anchor + + @property + def name(self): + """The final 
path component, if any.""" + parts = self._parts + if len(parts) == (1 if (self._drv or self._root) else 0): + return '' + return parts[-1] + + @property + def suffix(self): + """The final component's last suffix, if any.""" + name = self.name + i = name.rfind('.') + if 0 < i < len(name) - 1: + return name[i:] + else: + return '' + + @property + def suffixes(self): + """A list of the final component's suffixes, if any.""" + name = self.name + if name.endswith('.'): + return [] + name = name.lstrip('.') + return ['.' + suffix for suffix in name.split('.')[1:]] + + @property + def stem(self): + """The final path component, minus its last suffix.""" + name = self.name + i = name.rfind('.') + if 0 < i < len(name) - 1: + return name[:i] + else: + return name + + def with_name(self, name): + """Return a new path with the file name changed.""" + if not self.name: + raise ValueError("%r has an empty name" % (self,)) + drv, root, parts = self._flavour.parse_parts((name,)) + if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep] + or drv or root or len(parts) != 1): + raise ValueError("Invalid name %r" % (name)) + return self._from_parsed_parts(self._drv, self._root, + self._parts[:-1] + [name]) + + def with_suffix(self, suffix): + """Return a new path with the file suffix changed. If the path + has no suffix, add given suffix. If the given suffix is an empty + string, remove the suffix from the path. + """ + # XXX if suffix is None, should the current suffix be removed? + f = self._flavour + if f.sep in suffix or f.altsep and f.altsep in suffix: + raise ValueError("Invalid suffix %r" % (suffix)) + if suffix and not suffix.startswith('.') or suffix == '.': + raise ValueError("Invalid suffix %r" % (suffix)) + name = self.name + if not name: + raise ValueError("%r has an empty name" % (self,)) + old_suffix = self.suffix + if not old_suffix: + name = name + suffix + else: + name = name[:-len(old_suffix)] + suffix + return self._from_parsed_parts(self._drv, self._root, + self._parts[:-1] + [name]) + + def relative_to(self, *other): + """Return the relative path to another path identified by the passed + arguments. If the operation is not possible (because this is not + a subpath of the other path), raise ValueError. + """ + # For the purpose of this method, drive and root are considered + # separate parts, i.e.: + # Path('c:/').relative_to('c:') gives Path('/') + # Path('c:/').relative_to('/') raise ValueError + if not other: + raise TypeError("need at least one argument") + parts = self._parts + drv = self._drv + root = self._root + if root: + abs_parts = [drv, root] + parts[1:] + else: + abs_parts = parts + to_drv, to_root, to_parts = self._parse_args(other) + if to_root: + to_abs_parts = [to_drv, to_root] + to_parts[1:] + else: + to_abs_parts = to_parts + n = len(to_abs_parts) + cf = self._flavour.casefold_parts + if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): + formatted = self._format_parsed_parts(to_drv, to_root, to_parts) + raise ValueError("{0!r} does not start with {1!r}" + .format(str(self), str(formatted))) + return self._from_parsed_parts('', root if n == 1 else '', + abs_parts[n:]) + + @property + def parts(self): + """An object providing sequence-like access to the + components in the filesystem path.""" + # We cache the tuple to avoid building a new one each time .parts + # is accessed. XXX is this necessary? 
+ try: + return self._pparts + except AttributeError: + self._pparts = tuple(self._parts) + return self._pparts + + def joinpath(self, *args): + """Combine this path with one or several arguments, and return a + new path representing either a subpath (if all arguments are relative + paths) or a totally different path (if one of the arguments is + anchored). + """ + return self._make_child(args) + + def __truediv__(self, key): + return self._make_child((key,)) + + def __rtruediv__(self, key): + return self._from_parts([key] + self._parts) + + if six.PY2: + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + + @property + def parent(self): + """The logical parent of the path.""" + drv = self._drv + root = self._root + parts = self._parts + if len(parts) == 1 and (drv or root): + return self + return self._from_parsed_parts(drv, root, parts[:-1]) + + @property + def parents(self): + """A sequence of this path's logical parents.""" + return _PathParents(self) + + def is_absolute(self): + """True if the path is absolute (has both a root and, if applicable, + a drive).""" + if not self._root: + return False + return not self._flavour.has_drv or bool(self._drv) + + def is_reserved(self): + """Return True if the path contains one of the special names reserved + by the system, if any.""" + return self._flavour.is_reserved(self._parts) + + def match(self, path_pattern): + """ + Return True if this path matches the given pattern. + """ + cf = self._flavour.casefold + path_pattern = cf(path_pattern) + drv, root, pat_parts = self._flavour.parse_parts((path_pattern,)) + if not pat_parts: + raise ValueError("empty pattern") + if drv and drv != cf(self._drv): + return False + if root and root != cf(self._root): + return False + parts = self._cparts + if drv or root: + if len(pat_parts) != len(parts): + return False + pat_parts = pat_parts[1:] + elif len(pat_parts) > len(parts): + return False + for part, pat in zip(reversed(parts), reversed(pat_parts)): + if not fnmatch.fnmatchcase(part, pat): + return False + return True + + +# Can't subclass os.PathLike from PurePath and keep the constructor +# optimizations in PurePath._parse_args(). +if sys.version_info >= (3, 6): + os.PathLike.register(PurePath) + + +class PurePosixPath(PurePath): + _flavour = _posix_flavour + __slots__ = () + + +class PureWindowsPath(PurePath): + """PurePath subclass for Windows systems. + + On a Windows system, instantiating a PurePath should return this object. + However, you can also instantiate it directly on any system. + """ + _flavour = _windows_flavour + __slots__ = () + + +# Filesystem-accessing classes + + +class Path(PurePath): + """PurePath subclass that can make system calls. + + Path represents a filesystem path but unlike PurePath, also offers + methods to do system calls on path objects. Depending on your system, + instantiating a Path will return either a PosixPath or a WindowsPath + object. You can also instantiate a PosixPath or WindowsPath directly, + but cannot instantiate a WindowsPath on a POSIX system or vice versa. 
+ """ + __slots__ = ( + '_accessor', + '_closed', + ) + + def __new__(cls, *args, **kwargs): + if cls is Path: + cls = WindowsPath if os.name == 'nt' else PosixPath + self = cls._from_parts(args, init=False) + if not self._flavour.is_supported: + raise NotImplementedError("cannot instantiate %r on your system" + % (cls.__name__,)) + self._init() + return self + + def _init(self, + # Private non-constructor arguments + template=None, + ): + self._closed = False + if template is not None: + self._accessor = template._accessor + else: + self._accessor = _normal_accessor + + def _make_child_relpath(self, part): + # This is an optimization used for dir walking. `part` must be + # a single part relative to this path. + parts = self._parts + [part] + return self._from_parsed_parts(self._drv, self._root, parts) + + def __enter__(self): + if self._closed: + self._raise_closed() + return self + + def __exit__(self, t, v, tb): + self._closed = True + + def _raise_closed(self): + raise ValueError("I/O operation on closed path") + + def _opener(self, name, flags, mode=0o666): + # A stub for the opener argument to built-in open() + return self._accessor.open(self, flags, mode) + + def _raw_open(self, flags, mode=0o777): + """ + Open the file pointed by this path and return a file descriptor, + as os.open() does. + """ + if self._closed: + self._raise_closed() + return self._accessor.open(self, flags, mode) + + # Public API + + @classmethod + def cwd(cls): + """Return a new path pointing to the current working directory + (as returned by os.getcwd()). + """ + return cls(os.getcwd()) + + @classmethod + def home(cls): + """Return a new path pointing to the user's home directory (as + returned by os.path.expanduser('~')). + """ + return cls(cls()._flavour.gethomedir(None)) + + def samefile(self, other_path): + """Return whether other_path is the same or not as this file + (as returned by os.path.samefile()). + """ + if hasattr(os.path, "samestat"): + st = self.stat() + try: + other_st = other_path.stat() + except AttributeError: + other_st = os.stat(other_path) + return os.path.samestat(st, other_st) + else: + filename1 = six.text_type(self) + filename2 = six.text_type(other_path) + st1 = _win32_get_unique_path_id(filename1) + st2 = _win32_get_unique_path_id(filename2) + return st1 == st2 + + def iterdir(self): + """Iterate over the files in this directory. Does not yield any + result for the special paths '.' and '..'. + """ + if self._closed: + self._raise_closed() + for name in self._accessor.listdir(self): + if name in ('.', '..'): + # Yielding a path object for these makes little sense + continue + yield self._make_child_relpath(name) + if self._closed: + self._raise_closed() + + def glob(self, pattern): + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given relative pattern. + """ + if not pattern: + raise ValueError("Unacceptable pattern: {0!r}".format(pattern)) + pattern = self._flavour.casefold(pattern) + drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + selector = _make_selector(tuple(pattern_parts)) + for p in selector.select_from(self): + yield p + + def rglob(self, pattern): + """Recursively yield all existing files (of any kind, including + directories) matching the given relative pattern, anywhere in + this subtree. 
+ """ + pattern = self._flavour.casefold(pattern) + drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + selector = _make_selector(("**",) + tuple(pattern_parts)) + for p in selector.select_from(self): + yield p + + def absolute(self): + """Return an absolute version of this path. This function works + even if the path doesn't point to anything. + + No normalization is done, i.e. all '.' and '..' will be kept along. + Use resolve() to get the canonical path to a file. + """ + # XXX untested yet! + if self._closed: + self._raise_closed() + if self.is_absolute(): + return self + # FIXME this must defer to the specific flavour (and, under Windows, + # use nt._getfullpathname()) + obj = self._from_parts([os.getcwd()] + self._parts, init=False) + obj._init(template=self) + return obj + + def resolve(self, strict=False): + """ + Make the path absolute, resolving all symlinks on the way and also + normalizing it (for example turning slashes into backslashes under + Windows). + """ + if self._closed: + self._raise_closed() + s = self._flavour.resolve(self, strict=strict) + if s is None: + # No symlink resolution => for consistency, raise an error if + # the path is forbidden + # but not raise error if file does not exist (see issue #54). + + def _try_func(): + self.stat() + + def _exc_func(exc): + pass + + _try_except_filenotfounderror(_try_func, _exc_func) + s = str(self.absolute()) + else: + # ensure s is a string (normpath requires this on older python) + s = str(s) + # Now we have no symlinks in the path, it's safe to normalize it. + normed = self._flavour.pathmod.normpath(s) + obj = self._from_parts((normed,), init=False) + obj._init(template=self) + return obj + + def stat(self): + """ + Return the result of the stat() system call on this path, like + os.stat() does. + """ + return self._accessor.stat(self) + + def owner(self): + """ + Return the login name of the file owner. + """ + import pwd + return pwd.getpwuid(self.stat().st_uid).pw_name + + def group(self): + """ + Return the group name of the file gid. + """ + import grp + return grp.getgrgid(self.stat().st_gid).gr_name + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + """ + Open the file pointed by this path and return a file object, as + the built-in open() function does. + """ + if self._closed: + self._raise_closed() + if sys.version_info >= (3, 3): + return io.open( + str(self), mode, buffering, encoding, errors, newline, + opener=self._opener) + else: + return io.open(str(self), mode, buffering, + encoding, errors, newline) + + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode='rb') as f: + return f.read() + + def read_text(self, encoding=None, errors=None): + """ + Open the file in text mode, read it, and close the file. + """ + with self.open(mode='r', encoding=encoding, errors=errors) as f: + return f.read() + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. + """ + if not isinstance(data, six.binary_type): + raise TypeError( + 'data must be %s, not %s' % + (six.binary_type.__name__, data.__class__.__name__)) + with self.open(mode='wb') as f: + return f.write(data) + + def write_text(self, data, encoding=None, errors=None): + """ + Open the file in text mode, write to it, and close the file. 
+ """ + if not isinstance(data, six.text_type): + raise TypeError( + 'data must be %s, not %s' % + (six.text_type.__name__, data.__class__.__name__)) + with self.open(mode='w', encoding=encoding, errors=errors) as f: + return f.write(data) + + def touch(self, mode=0o666, exist_ok=True): + """ + Create this file with the given access mode, if it doesn't exist. + """ + if self._closed: + self._raise_closed() + if exist_ok: + # First try to bump modification time + # Implementation note: GNU touch uses the UTIME_NOW option of + # the utimensat() / futimens() functions. + try: + self._accessor.utime(self, None) + except OSError: + # Avoid exception chaining + pass + else: + return + flags = os.O_CREAT | os.O_WRONLY + if not exist_ok: + flags |= os.O_EXCL + fd = self._raw_open(flags, mode) + os.close(fd) + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + """ + Create a new directory at this given path. + """ + if self._closed: + self._raise_closed() + + def _try_func(): + self._accessor.mkdir(self, mode) + + def _exc_func(exc): + if not parents or self.parent == self: + raise exc + self.parent.mkdir(parents=True, exist_ok=True) + self.mkdir(mode, parents=False, exist_ok=exist_ok) + + try: + _try_except_filenotfounderror(_try_func, _exc_func) + except OSError: + # Cannot rely on checking for EEXIST, since the operating system + # could give priority to other errors like EACCES or EROFS + if not exist_ok or not self.is_dir(): + raise + + def chmod(self, mode): + """ + Change the permissions of the path, like os.chmod(). + """ + if self._closed: + self._raise_closed() + self._accessor.chmod(self, mode) + + def lchmod(self, mode): + """ + Like chmod(), except if the path points to a symlink, the symlink's + permissions are changed, rather than its target's. + """ + if self._closed: + self._raise_closed() + self._accessor.lchmod(self, mode) + + def unlink(self): + """ + Remove this file or link. + If the path is a directory, use rmdir() instead. + """ + if self._closed: + self._raise_closed() + self._accessor.unlink(self) + + def rmdir(self): + """ + Remove this directory. The directory must be empty. + """ + if self._closed: + self._raise_closed() + self._accessor.rmdir(self) + + def lstat(self): + """ + Like stat(), except if the path points to a symlink, the symlink's + status information is returned, rather than its target's. + """ + if self._closed: + self._raise_closed() + return self._accessor.lstat(self) + + def rename(self, target): + """ + Rename this path to the given path. + """ + if self._closed: + self._raise_closed() + self._accessor.rename(self, target) + + def replace(self, target): + """ + Rename this path to the given path, clobbering the existing + destination if it exists. + """ + if sys.version_info < (3, 3): + raise NotImplementedError("replace() is only available " + "with Python 3.3 and later") + if self._closed: + self._raise_closed() + self._accessor.replace(self, target) + + def symlink_to(self, target, target_is_directory=False): + """ + Make this path a symlink pointing to the given path. + Note the order of arguments (self, target) is the reverse of + os.symlink's. + """ + if self._closed: + self._raise_closed() + self._accessor.symlink(target, self, target_is_directory) + + # Convenience functions for querying the stat results + + def exists(self): + """ + Whether this path exists. 
+ """ + try: + self.stat() + except OSError as e: + if not _ignore_error(e): + raise + return False + except ValueError: + # Non-encodable path + return False + return True + + def is_dir(self): + """ + Whether this path is a directory. + """ + try: + return S_ISDIR(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + except ValueError: + # Non-encodable path + return False + + def is_file(self): + """ + Whether this path is a regular file (also True for symlinks pointing + to regular files). + """ + try: + return S_ISREG(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + except ValueError: + # Non-encodable path + return False + + def is_mount(self): + """ + Check if this path is a POSIX mount point + """ + # Need to exist and be a dir + if not self.exists() or not self.is_dir(): + return False + + parent = Path(self.parent) + try: + parent_dev = parent.stat().st_dev + except OSError: + return False + + dev = self.stat().st_dev + if dev != parent_dev: + return True + ino = self.stat().st_ino + parent_ino = parent.stat().st_ino + return ino == parent_ino + + def is_symlink(self): + """ + Whether this path is a symbolic link. + """ + try: + return S_ISLNK(self.lstat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist + return False + except ValueError: + # Non-encodable path + return False + + def is_block_device(self): + """ + Whether this path is a block device. + """ + try: + return S_ISBLK(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + except ValueError: + # Non-encodable path + return False + + def is_char_device(self): + """ + Whether this path is a character device. + """ + try: + return S_ISCHR(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + except ValueError: + # Non-encodable path + return False + + def is_fifo(self): + """ + Whether this path is a FIFO. + """ + try: + return S_ISFIFO(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + except ValueError: + # Non-encodable path + return False + + def is_socket(self): + """ + Whether this path is a socket. + """ + try: + return S_ISSOCK(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + except ValueError: + # Non-encodable path + return False + + def expanduser(self): + """ Return a new path with expanded ~ and ~user constructs + (as returned by os.path.expanduser) + """ + if (not (self._drv or self._root) + and self._parts and self._parts[0][:1] == '~'): + homedir = self._flavour.gethomedir(self._parts[0][1:]) + return self._from_parts([homedir] + self._parts[1:]) + + return self + + +class PosixPath(Path, PurePosixPath): + """Path subclass for non-Windows systems. 
+ + On a POSIX system, instantiating a Path should return this object. + """ + __slots__ = () + + +class WindowsPath(Path, PureWindowsPath): + """Path subclass for Windows systems. + + On a Windows system, instantiating a Path should return this object. + """ + __slots__ = () + + def owner(self): + raise NotImplementedError("Path.owner() is unsupported on this system") + + def group(self): + raise NotImplementedError("Path.group() is unsupported on this system") + + def is_mount(self): + raise NotImplementedError( + "Path.is_mount() is unsupported on this system") diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/LICENSE.rst b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/LICENSE.rst deleted file mode 100644 index cf68fb4e6b55..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/LICENSE.rst +++ /dev/null @@ -1,23 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2017 Matthias C. M. Troffaes -Copyright (c) 2012-2014 Antoine Pitrou and contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
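For orientation, the pure-path API vendored above mirrors the standard-library ``pathlib`` on Python 3. A minimal interactive sketch of the suffix handling and ``relative_to`` logic implemented above (illustrative only, not part of the vendored sources):

.. code-block:: pycon

    >>> from pathlib2 import PurePosixPath
    >>> p = PurePosixPath('/srv/app/archive.tar.gz')
    >>> p.name, p.stem, p.suffix
    ('archive.tar.gz', 'archive.tar', '.gz')
    >>> p.suffixes
    ['.tar', '.gz']
    >>> p.with_suffix('.zip')          # only the last suffix is replaced
    PurePosixPath('/srv/app/archive.tar.zip')
    >>> p.relative_to('/srv')
    PurePosixPath('app/archive.tar.gz')
    >>> p.relative_to('/etc')          # not a subpath, so this raises
    Traceback (most recent call last):
      ...
    ValueError: '/srv/app/archive.tar.gz' does not start with '/etc'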
- diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/RECORD deleted file mode 100644 index 8a0ffe6594c7..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info/RECORD +++ /dev/null @@ -1,6 +0,0 @@ -pathlib2/__init__.py,sha256=dnnY0V6WFrj4jPRKav-5wHiccg1DjtOwYiIy29uwgG8,61436 -pathlib2-2.3.6.dist-info/LICENSE.rst,sha256=hh-BMAShUax3AkrURXlGU4Cd34p1cq7nurGNEd8rocY,1175 -pathlib2-2.3.6.dist-info/METADATA,sha256=jF4L3KQUpyocdlJ2_J-yNWZCow4oBvo72NvCLrWrHjw,3478 -pathlib2-2.3.6.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 -pathlib2-2.3.6.dist-info/top_level.txt,sha256=tNPkisFiGBFsPUnCIHg62vSFlkx_1NO86Id8lbJmfFQ,9 -pathlib2-2.3.6.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/RECORD deleted file mode 100644 index 45ab83018f76..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/RECORD +++ /dev/null @@ -1,6 +0,0 @@ -platformdirs.py,sha256=vwvsSuJFZ4ETQO4EA7vd6eM7s8d6NEY_Ev7icuu2vig,29618 -platformdirs-2.0.2.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097 -platformdirs-2.0.2.dist-info/METADATA,sha256=NfQCBZPpq60Fu8iUw5jYkLRnjCs0NWkhhcvcx_FX5tg,10370 -platformdirs-2.0.2.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 -platformdirs-2.0.2.dist-info/top_level.txt,sha256=i0Q-nUAcPabcNxrgdGr0mzOEavZ4cGml0zH0oxKEFQE,13 -platformdirs-2.0.2.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/WHEEL deleted file mode 100644 index 01b8fc7d4a10..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/top_level.txt deleted file mode 100644 index 67fd014bbdd7..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -platformdirs diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/LICENSE.txt b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/LICENSE.txt deleted file mode 100644 index f0bbd69f0c88..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -# This is the MIT license - -Copyright (c) 2010 ActiveState Software Inc. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/METADATA deleted file mode 100644 index 5dff59a5517d..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/METADATA +++ /dev/null @@ -1,225 +0,0 @@ -Metadata-Version: 2.1 -Name: platformdirs -Version: 2.2.0 -Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". -Home-page: https://github.com/platformdirs/platformdirs -Maintainer: Bernát Gábor, Julian Berman, Ofek Lev, Ronny Pfannschmidt -Maintainer-email: gaborjbernat@gmail.com, Julian@GrayVines.com, oss@ofek.dev, opensource@ronnypfannschmidt.de -License: MIT -Project-URL: Source, https://github.com/platformdirs/platformdirs -Project-URL: Tracker, https://github.com/platformdirs/platformdirs/issues -Project-URL: Documentation, https://platformdirs.readthedocs.io/ -Keywords: application directory log cache user -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE.txt -Provides-Extra: docs -Requires-Dist: Sphinx (>=4) ; extra == 'docs' -Requires-Dist: furo (>=2021.7.5b38) ; extra == 'docs' -Requires-Dist: proselint (>=0.10.2) ; extra == 'docs' -Requires-Dist: sphinx-autodoc-typehints (>=1.12) ; extra == 'docs' -Provides-Extra: test -Requires-Dist: appdirs (==1.4.4) ; extra == 'test' -Requires-Dist: pytest (>=6) ; extra == 'test' -Requires-Dist: pytest-cov (>=2.7) ; extra == 'test' -Requires-Dist: 
pytest-mock (>=3.6) ; extra == 'test' - -the problem -=========== - -.. image:: https://github.com/platformdirs/platformdirs/workflows/Test/badge.svg - :target: https://github.com/platformdirs/platformdirs/actions?query=workflow%3ATest - -When writing desktop application, finding the right location to store user data -and configuration varies per platform. Even for single-platform apps, there -may by plenty of nuances in figuring out the right location. - -For example, if running on macOS, you should use:: - - ~/Library/Application Support/ - -If on Windows (at least English Win XP) that should be:: - - C:\Documents and Settings\\Application Data\Local Settings\\ - -or possibly:: - - C:\Documents and Settings\\Application Data\\ - -for `roaming profiles `_ but that is another story. - -On Linux (and other Unices), according to the `XDG Basedir Spec`_, it should be:: - - ~/.local/share/ - -.. _XDG Basedir Spec: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html - -``platformdirs`` to the rescue -============================== - -This kind of thing is what the ``platformdirs`` module is for. -``platformdirs`` will help you choose an appropriate: - -- user data dir (``user_data_dir``) -- user config dir (``user_config_dir``) -- user cache dir (``user_cache_dir``) -- site data dir (``site_data_dir``) -- site config dir (``site_config_dir``) -- user log dir (``user_log_dir``) - -And also: - -- Is a single module so other Python packages can vendor their own private copy. -- Is slightly opinionated on the directory names used. Look for "OPINION" in - documentation and code for when an opinion is being applied. - -Example output -============== - -On macOS: - -.. code-block:: pycon - - >>> from platformdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/Users/trentm/Library/Application Support/SuperApp' - >>> site_data_dir(appname, appauthor) - '/Library/Application Support/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/Users/trentm/Library/Caches/SuperApp' - >>> user_log_dir(appname, appauthor) - '/Users/trentm/Library/Logs/SuperApp' - -On Windows 7: - -.. code-block:: pycon - - >>> from platformdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' - >>> user_data_dir(appname, appauthor, roaming=True) - 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' - >>> user_cache_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' - >>> user_log_dir(appname, appauthor) - 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' - -On Linux: - -.. 
code-block:: pycon - - >>> from platformdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/home/trentm/.local/share/SuperApp - >>> site_data_dir(appname, appauthor) - '/usr/local/share/SuperApp' - >>> site_data_dir(appname, appauthor, multipath=True) - '/usr/local/share/SuperApp:/usr/share/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/home/trentm/.cache/SuperApp' - >>> user_log_dir(appname, appauthor) - '/home/trentm/.cache/SuperApp/log' - >>> user_config_dir(appname) - '/home/trentm/.config/SuperApp' - >>> site_config_dir(appname) - '/etc/xdg/SuperApp' - >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc' - >>> site_config_dir(appname, multipath=True) - '/etc/SuperApp:/usr/local/etc/SuperApp' - -On Android:: - - >>> from platformdirs import * - >>> appname = "SuperApp" - >>> appauthor = "Acme" - >>> user_data_dir(appname, appauthor) - '/data/data/com.termux/files/SuperApp' - >>> user_cache_dir(appname, appauthor) - '/data/data/com.termux/cache/SuperApp' - >>> user_log_dir(appname, appauthor) - '/data/data/com.termux/cache/SuperApp/log' - >>> user_config_dir(appname) - '/data/data/com.termux/shared_prefs/SuperApp' - - -``PlatformDirs`` for convenience -================================ - -.. code-block:: pycon - - >>> from platformdirs import PlatformDirs - >>> dirs = PlatformDirs("SuperApp", "Acme") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp' - >>> dirs.site_data_dir - '/Library/Application Support/SuperApp' - >>> dirs.user_cache_dir - '/Users/trentm/Library/Caches/SuperApp' - >>> dirs.user_log_dir - '/Users/trentm/Library/Logs/SuperApp' - -Per-version isolation -===================== - -If you have multiple versions of your app in use that you want to be -able to run side-by-side, then you may want version-isolation for these -dirs:: - - >>> from platformdirs import PlatformDirs - >>> dirs = PlatformDirs("SuperApp", "Acme", version="1.0") - >>> dirs.user_data_dir - '/Users/trentm/Library/Application Support/SuperApp/1.0' - >>> dirs.site_data_dir - '/Library/Application Support/SuperApp/1.0' - >>> dirs.user_cache_dir - '/Users/trentm/Library/Caches/SuperApp/1.0' - >>> dirs.user_log_dir - '/Users/trentm/Library/Logs/SuperApp/1.0' - -Be wary of using this for configuration files though; you'll need to handle -migrating configuration files manually. - -Why this Fork? -============== - -This repository is a friendly fork of the wonderful work started by -`ActiveState `_ who created -``appdirs``, this package's ancestor. - -Maintaining an open source project is no easy task, particularly -from within an organization, and the Python community is indebted -to ``appdirs`` (and to Trent Mick and Jeff Rouse in particular) for -creating an incredibly useful simple module, as evidenced by the wide -number of users it has attracted over the years. - -Nonetheless, given the number of long-standing open issues -and pull requests, and no clear path towards `ensuring -that maintenance of the package would continue or grow -`_, this fork was -created. - -Contributions are most welcome. 
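The module-level helpers removed below are thin wrappers that instantiate ``PlatformDirs`` with the given arguments and return the matching attribute. An illustrative session (paths shown for a hypothetical Linux user with ``HOME=/home/user`` and no ``XDG_*`` overrides; not part of the vendored sources):

.. code-block:: pycon

    >>> from platformdirs import PlatformDirs, user_cache_dir
    >>> # the functional form...
    >>> user_cache_dir("SuperApp", "Acme", version="1.0")
    '/home/user/.cache/SuperApp/1.0'
    >>> # ...is equivalent to the class-based form (appauthor is ignored on Unix)
    >>> PlatformDirs("SuperApp", "Acme", version="1.0").user_cache_dir
    '/home/user/.cache/SuperApp/1.0'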
- - diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/RECORD deleted file mode 100644 index f07bbaa3ac31..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -platformdirs/__init__.py,sha256=C32z-EbmCccqueHgd-cBemkcWLPM7ndXTr3CHQukVX0,11051 -platformdirs/__main__.py,sha256=ouFgPhklG0WPNj9k5BRp1ch9-aXav2qg_ERQFiMSUK8,1078 -platformdirs/android.py,sha256=Wj2P-RcqHbtg6Wj_5JVS_MUY5zOtk0eEiFTfrBNI-X0,2830 -platformdirs/api.py,sha256=BE-Ph_vD1gGkCbqt_6BCwrWVBrTJzvmHfquUTNIXlx8,4344 -platformdirs/macos.py,sha256=dz7JjAjLn8fjFxV_GVD8g9MbgMuKL3kztt3rHsE6l8Y,2156 -platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -platformdirs/unix.py,sha256=q7YNu7b9nr6f4j-MeBl_iJpeoAJdc7RYpZi9x-1nKj4,4973 -platformdirs/version.py,sha256=3VDVXYTLENi4j_4gt5pC-U5-K1JsyRu2C2p1JmtmNto,80 -platformdirs/windows.py,sha256=5VtOt_j6JiWqMgpOQONC7KbA3JOvj5FLeWRzHzWnBtE,5712 -platformdirs-2.2.0.dist-info/LICENSE.txt,sha256=1l7lsqNqOR6Nau_OuArlAGMEml2xS8gk7g8eusmXDlA,1096 -platformdirs-2.2.0.dist-info/METADATA,sha256=NUgmcuIi6c0FO6U__3cgMMGK8wSLyDuOSY5SPRmSDOA,8162 -platformdirs-2.2.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 -platformdirs-2.2.0.dist-info/top_level.txt,sha256=i0Q-nUAcPabcNxrgdGr0mzOEavZ4cGml0zH0oxKEFQE,13 -platformdirs-2.2.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -platformdirs-2.2.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/WHEEL deleted file mode 100644 index 385faab0525c..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/top_level.txt deleted file mode 100644 index 67fd014bbdd7..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -platformdirs diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py deleted file mode 100644 index f2f6abc9adec..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py +++ /dev/null @@ -1,279 +0,0 @@ -""" -Utilities for determining application-specific dirs. See for details and -usage. 
-""" -import importlib -import os -import sys -from pathlib import Path -from typing import TYPE_CHECKING, Optional, Type, Union - -if TYPE_CHECKING: - from typing_extensions import Literal # pragma: no cover - -from .api import PlatformDirsABC -from .version import __version__, __version_info__ - - -def _set_platform_dir_class() -> Type[PlatformDirsABC]: - if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": - module, name = "platformdirs.android", "Android" - elif sys.platform == "win32": - module, name = "platformdirs.windows", "Windows" - elif sys.platform == "darwin": - module, name = "platformdirs.macos", "MacOS" - else: - module, name = "platformdirs.unix", "Unix" - result: Type[PlatformDirsABC] = getattr(importlib.import_module(module), name) - return result - - -PlatformDirs = _set_platform_dir_class() #: Currently active platform -AppDirs = PlatformDirs #: Backwards compatibility with appdirs - - -def user_data_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param roaming: See `roaming `. - :returns: data directory tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir - - -def site_data_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - multipath: bool = False, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param multipath: See `roaming `. - :returns: data directory shared by users - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir - - -def user_config_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param roaming: See `roaming `. - :returns: config directory tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir - - -def site_config_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - multipath: bool = False, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param multipath: See `roaming `. - :returns: config directory shared by the users - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir - - -def user_cache_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - opinion: bool = True, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param opinion: See `roaming `. 
- :returns: cache directory tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir - - -def user_state_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param roaming: See `roaming `. - :returns: state directory tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir - - -def user_log_dir( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - opinion: bool = True, -) -> str: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param opinion: See `roaming `. - :returns: log directory tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir - - -def user_data_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param roaming: See `roaming `. - :returns: data path tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path - - -def site_data_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - multipath: bool = False, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param multipath: See `multipath `. - :returns: data path shared by users - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path - - -def user_config_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param roaming: See `roaming `. - :returns: config path tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path - - -def site_config_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - multipath: bool = False, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param multipath: See `roaming `. - :returns: config path shared by the users - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path - - -def user_cache_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - opinion: bool = True, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param opinion: See `roaming `. 
- :returns: cache path tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path - - -def user_state_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param roaming: See `roaming `. - :returns: state path tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path - - -def user_log_path( - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - opinion: bool = True, -) -> Path: - """ - :param appname: See `appname `. - :param appauthor: See `appauthor `. - :param version: See `version `. - :param opinion: See `roaming `. - :returns: log path tied to the user - """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path - - -__all__ = [ - "__version__", - "__version_info__", - "PlatformDirs", - "AppDirs", - "PlatformDirsABC", - "user_data_dir", - "user_config_dir", - "user_cache_dir", - "user_state_dir", - "user_log_dir", - "site_data_dir", - "site_config_dir", - "user_data_path", - "user_config_path", - "user_cache_path", - "user_state_path", - "user_log_path", - "site_data_path", - "site_config_path", -] diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py deleted file mode 100644 index dd35066fb365..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py +++ /dev/null @@ -1,42 +0,0 @@ -from platformdirs import PlatformDirs, __version__ - -PROPS = ( - "user_data_dir", - "user_config_dir", - "user_cache_dir", - "user_state_dir", - "user_log_dir", - "site_data_dir", - "site_config_dir", -) - - -def main() -> None: - app_name = "MyApp" - app_author = "MyCompany" - - print(f"-- platformdirs {__version__} --") - - print("-- app dirs (with optional 'version')") - dirs = PlatformDirs(app_name, app_author, version="1.0") - for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") - - print("\n-- app dirs (without optional 'version')") - dirs = PlatformDirs(app_name, app_author) - for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") - - print("\n-- app dirs (without optional 'appauthor')") - dirs = PlatformDirs(app_name) - for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") - - print("\n-- app dirs (with disabled 'appauthor')") - dirs = PlatformDirs(app_name, appauthor=False) - for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") - - -if __name__ == "__main__": - main() diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py deleted file mode 100644 index c8c6419460d9..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py +++ /dev/null @@ -1,83 +0,0 @@ -import os -import re -import sys -from functools import lru_cache - -from .api import PlatformDirsABC - - -class Android(PlatformDirsABC): - """ - Follows the guidance `from here `_. 
Makes use of the - `appname ` and - `version `. - """ - - @property - def user_data_dir(self) -> str: - """:return: data directory tied to the user, e.g. ``/data/user///files/``""" - return self._append_app_name_and_version(_android_folder(), "files") - - @property - def site_data_dir(self) -> str: - """:return: data directory shared by users, same as `user_data_dir`""" - return self.user_data_dir - - @property - def user_config_dir(self) -> str: - """ - :return: config directory tied to the user, e.g. ``/data/user///shared_prefs/`` - """ - return self._append_app_name_and_version(_android_folder(), "shared_prefs") - - @property - def site_config_dir(self) -> str: - """:return: config directory shared by the users, same as `user_config_dir`""" - return self.user_config_dir - - @property - def user_cache_dir(self) -> str: - """:return: cache directory tied to the user, e.g. e.g. ``/data/user///cache/``""" - return self._append_app_name_and_version(_android_folder(), "cache") - - @property - def user_state_dir(self) -> str: - """:return: state directory tied to the user, same as `user_data_dir`""" - return self.user_data_dir - - @property - def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it, - e.g. ``/data/user///cache//log`` - """ - path = self.user_cache_dir - if self.opinion: - path = os.path.join(path, "log") - return path - - -@lru_cache(maxsize=1) -def _android_folder() -> str: - """:return: base folder for the Android OS""" - try: - # First try to get path to android app via pyjnius - from jnius import autoclass # noqa: SC200 - - Context = autoclass("android.content.Context") # noqa: SC200 - result: str = Context.getFilesDir().getParentFile().getAbsolutePath() - except Exception: - # if fails find an android folder looking path on the sys.path - pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files") - for path in sys.path: - if pattern.match(path): - result = path.split("/files")[0] - break - else: - raise OSError("Cannot find path to android app folder") - return result - - -__all__ = [ - "Android", -] diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py deleted file mode 100644 index 7b29173452f7..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py +++ /dev/null @@ -1,135 +0,0 @@ -import os -import sys -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Optional, Union - -if sys.version_info >= (3, 8): # pragma: no branch - from typing import Literal # pragma: no cover - - -class PlatformDirsABC(ABC): - """ - Abstract base class for platform directories. - """ - - def __init__( - self, - appname: Optional[str] = None, - appauthor: Union[str, None, "Literal[False]"] = None, - version: Optional[str] = None, - roaming: bool = False, - multipath: bool = False, - opinion: bool = True, - ): - """ - Create a new platform directory. - - :param appname: See `appname`. - :param appauthor: See `appauthor`. - :param version: See `version`. - :param roaming: See `roaming`. - :param multipath: See `multipath`. - :param opinion: See `opinion`. - """ - self.appname = appname #: The name of application. - self.appauthor = appauthor - """ - The name of the app author or distributing body for this application. Typically, it is the owning company name. 
- Defaults to `appname`. You may pass ``False`` to disable it. - """ - self.version = version - """ - An optional version path element to append to the path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this would typically be ``.``. - """ - self.roaming = roaming - """ - Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup - for roaming profiles, this user data will be synced on login (see - `here `_). - """ - self.multipath = multipath - """ - An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be - returned. By default, the first item would only be returned. - """ - self.opinion = opinion #: A flag to indicating to use opinionated values. - - def _append_app_name_and_version(self, *base: str) -> str: - params = list(base[1:]) - if self.appname: - params.append(self.appname) - if self.version: - params.append(self.version) - return os.path.join(base[0], *params) - - @property - @abstractmethod - def user_data_dir(self) -> str: - """:return: data directory tied to the user""" - - @property - @abstractmethod - def site_data_dir(self) -> str: - """:return: data directory shared by users""" - - @property - @abstractmethod - def user_config_dir(self) -> str: - """:return: config directory tied to the user""" - - @property - @abstractmethod - def site_config_dir(self) -> str: - """:return: config directory shared by the users""" - - @property - @abstractmethod - def user_cache_dir(self) -> str: - """:return: cache directory tied to the user""" - - @property - @abstractmethod - def user_state_dir(self) -> str: - """:return: state directory tied to the user""" - - @property - @abstractmethod - def user_log_dir(self) -> str: - """:return: log directory tied to the user""" - - @property - def user_data_path(self) -> Path: - """:return: data path tied to the user""" - return Path(self.user_data_dir) - - @property - def site_data_path(self) -> Path: - """:return: data path shared by users""" - return Path(self.site_data_dir) - - @property - def user_config_path(self) -> Path: - """:return: config path tied to the user""" - return Path(self.user_config_dir) - - @property - def site_config_path(self) -> Path: - """:return: config path shared by the users""" - return Path(self.site_config_dir) - - @property - def user_cache_path(self) -> Path: - """:return: cache path tied to the user""" - return Path(self.user_cache_dir) - - @property - def user_state_path(self) -> Path: - """:return: state path tied to the user""" - return Path(self.user_state_dir) - - @property - def user_log_path(self) -> Path: - """:return: log path tied to the user""" - return Path(self.user_log_dir) diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py deleted file mode 100644 index 4d4d5b02c271..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py +++ /dev/null @@ -1,52 +0,0 @@ -import os - -from .api import PlatformDirsABC - - -class MacOS(PlatformDirsABC): - """ - Platform directories for the macOS operating system. Follows the guidance from `Apple documentation - `_. - Makes use of the `appname ` and - `version `. - """ - - @property - def user_data_dir(self) -> str: - """:return: data directory tied to the user, e.g. 
``~/Library/Application Support/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/")) - - @property - def site_data_dir(self) -> str: - """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``""" - return self._append_app_name_and_version("/Library/Application Support") - - @property - def user_config_dir(self) -> str: - """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/")) - - @property - def site_config_dir(self) -> str: - """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``""" - return self._append_app_name_and_version("/Library/Preferences") - - @property - def user_cache_dir(self) -> str: - """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) - - @property - def user_state_dir(self) -> str: - """:return: state directory tied to the user, same as `user_data_dir`""" - return self.user_data_dir - - @property - def user_log_dir(self) -> str: - """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) - - -__all__ = [ - "MacOS", -] diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/py.typed b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/py.typed deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py deleted file mode 100644 index 3f6afec5a6d4..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -from pathlib import Path - -from .api import PlatformDirsABC - - -class Unix(PlatformDirsABC): - """ - On Unix/Linux, we follow the - `XDG Basedir Spec `_. The spec allows - overriding directories with environment variables. The examples show are the default values, alongside the name of - the environment variable that overrides them. Makes use of the - `appname `, - `version `, - `multipath `, - `opinion `. - """ - - @property - def user_data_dir(self) -> str: - """ - :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or - ``$XDG_DATA_HOME/$appname/$version`` - """ - path = os.environ.get("XDG_DATA_HOME", "") - if not path.strip(): - path = os.path.expanduser("~/.local/share") - return self._append_app_name_and_version(path) - - @property - def site_data_dir(self) -> str: - """ - :return: data directories shared by users (if `multipath ` is - enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS - path separator), e.g. 
``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version`` - """ - # XDG default for $XDG_DATA_DIRS; only first, if multipath is False - path = os.environ.get("XDG_DATA_DIRS", "") - if not path.strip(): - path = f"/usr/local/share{os.pathsep}/usr/share" - return self._with_multi_path(path) - - def _with_multi_path(self, path: str) -> str: - path_list = path.split(os.pathsep) - if not self.multipath: - path_list = path_list[0:1] - path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list] - return os.pathsep.join(path_list) - - @property - def user_config_dir(self) -> str: - """ - :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or - ``$XDG_CONFIG_HOME/$appname/$version`` - """ - path = os.environ.get("XDG_CONFIG_HOME", "") - if not path.strip(): - path = os.path.expanduser("~/.config") - return self._append_app_name_and_version(path) - - @property - def site_config_dir(self) -> str: - """ - :return: config directories shared by users (if `multipath ` - is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS - path separator), e.g. ``/etc/xdg/$appname/$version`` - """ - # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False - path = os.environ.get("XDG_CONFIG_DIRS", "") - if not path.strip(): - path = "/etc/xdg" - return self._with_multi_path(path) - - @property - def user_cache_dir(self) -> str: - """ - :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or - ``~/$XDG_CACHE_HOME/$appname/$version`` - """ - path = os.environ.get("XDG_CACHE_HOME", "") - if not path.strip(): - path = os.path.expanduser("~/.cache") - return self._append_app_name_and_version(path) - - @property - def user_state_dir(self) -> str: - """ - :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or - ``$XDG_STATE_HOME/$appname/$version`` - """ - path = os.environ.get("XDG_STATE_HOME", "") - if not path.strip(): - path = os.path.expanduser("~/.local/state") - return self._append_app_name_and_version(path) - - @property - def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``log`` in it - """ - path = self.user_cache_dir - if self.opinion: - path = os.path.join(path, "log") - return path - - @property - def site_data_path(self) -> Path: - """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``""" - return self._first_item_as_path_if_multipath(self.site_data_dir) - - @property - def site_config_path(self) -> Path: - """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``""" - return self._first_item_as_path_if_multipath(self.site_config_dir) - - def _first_item_as_path_if_multipath(self, directory: str) -> Path: - if self.multipath: - # If multipath is True, the first path is returned. 
- directory = directory.split(os.pathsep)[0] - return Path(directory) - - -__all__ = [ - "Unix", -] diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py deleted file mode 100644 index 2b18c715b520..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py +++ /dev/null @@ -1,4 +0,0 @@ -""" Version information """ - -__version__ = "2.2.0" -__version_info__ = (2, 2, 0) diff --git a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py b/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py deleted file mode 100644 index 274a8c89e858..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py +++ /dev/null @@ -1,163 +0,0 @@ -import ctypes -import os -from functools import lru_cache -from typing import Callable, Optional - -from .api import PlatformDirsABC - - -class Windows(PlatformDirsABC): - """`MSDN on where to store app data files - `_. - Makes use of the - `appname `, - `appauthor `, - `version `, - `roaming `, - `opinion `.""" - - @property - def user_data_dir(self) -> str: - """ - :return: data directory tied to the user, e.g. - ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or - ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming) - """ - const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA" - path = os.path.normpath(get_win_folder(const)) - return self._append_parts(path) - - def _append_parts(self, path: str, *, opinion_value: Optional[str] = None) -> str: - params = [] - if self.appname: - if self.appauthor is not False: - author = self.appauthor or self.appname - params.append(author) - params.append(self.appname) - if opinion_value is not None and self.opinion: - params.append(opinion_value) - if self.version: - params.append(self.version) - return os.path.join(path, *params) - - @property - def site_data_dir(self) -> str: - """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``""" - path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA")) - return self._append_parts(path) - - @property - def user_config_dir(self) -> str: - """:return: config directory tied to the user, same as `user_data_dir`""" - return self.user_data_dir - - @property - def site_config_dir(self) -> str: - """:return: config directory shared by the users, same as `site_data_dir`""" - return self.site_data_dir - - @property - def user_cache_dir(self) -> str: - """ - :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g. 
- ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version`` - """ - path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA")) - return self._append_parts(path, opinion_value="Cache") - - @property - def user_state_dir(self) -> str: - """:return: state directory tied to the user, same as `user_data_dir`""" - return self.user_data_dir - - @property - def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it - """ - path = self.user_data_dir - if self.opinion: - path = os.path.join(path, "Logs") - return path - - -def get_win_folder_from_env_vars(csidl_name: str) -> str: - """Get folder from environment variables.""" - env_var_name = { - "CSIDL_APPDATA": "APPDATA", - "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE", - "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA", - }.get(csidl_name) - if env_var_name is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") - result = os.environ.get(env_var_name) - if result is None: - raise ValueError(f"Unset environment variable: {env_var_name}") - return result - - -def get_win_folder_from_registry(csidl_name: str) -> str: - """Get folder from the registry. - - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }.get(csidl_name) - if shell_folder_name is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") - - import winreg - - key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders") - directory, _ = winreg.QueryValueEx(key, shell_folder_name) - return str(directory) - - -def get_win_folder_via_ctypes(csidl_name: str) -> str: - """Get folder with ctypes.""" - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }.get(csidl_name) - if csidl_const is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") - - buf = ctypes.create_unicode_buffer(1024) - windll = getattr(ctypes, "windll") # noqa: B009 # using getattr to avoid false positive with mypy type checker - windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - has_high_char = False # Downgrade to short path name if it has highbit chars. 
- for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - - -def _pick_get_win_folder() -> Callable[[str], str]: - if hasattr(ctypes, "windll"): - return get_win_folder_via_ctypes - try: - import winreg # noqa: F401 - except ImportError: - return get_win_folder_from_env_vars - else: - return get_win_folder_from_registry - - -get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder()) - -__all__ = [ - "Windows", -] diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/WHEEL deleted file mode 100644 index 25a30861c7a5..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: false -Tag: cp39-cp39-linux_x86_64 - diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/LICENSE.txt b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/LICENSE.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/LICENSE.txt rename to third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/LICENSE.txt diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA similarity index 99% rename from third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA index 6538e8823042..ee4b11a523e6 100644 --- a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA @@ -23,7 +23,6 @@ Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: Implementation :: CPython -License-File: LICENSE.txt scandir, a better directory iterator and faster os.walk() diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD similarity index 62% rename from third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/RECORD rename to third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD index 847f48e3edff..2140d975b4a7 100644 --- 
a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/RECORD +++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD @@ -1,6 +1,6 @@ scandir.py,sha256=97C2AQInuKk-Phb3aXM7fJomhc-00pZMcBur23NUmrE,24827 scandir-1.10.0.dist-info/LICENSE.txt,sha256=peL73COXREGdKUB828knk8TZwdlWwXT3y3-W-m0FjIY,1464 -scandir-1.10.0.dist-info/METADATA,sha256=BuKVp0Sn8CSvG1Ayp7tLOjyg4zrwWbUFxrUNz439u6U,9585 -scandir-1.10.0.dist-info/WHEEL,sha256=jR_DIfh0YPrTez7i3YqBzGYuKdVs75FjZMdMtvIwioM,103 +scandir-1.10.0.dist-info/METADATA,sha256=cv1fZ5DeC3DJqnMByWGiprvGhLpQCkWOZiJduweakGk,9559 +scandir-1.10.0.dist-info/WHEEL,sha256=WO4o60shExe_A5pkiO6Yb-8OHLGhlAGcs2oJ7aUuE5Q,110 scandir-1.10.0.dist-info/top_level.txt,sha256=Ixze5mNjmis99ql7JEtAYc9-djJMbfRx-FFw3R_zZf8,17 scandir-1.10.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL new file mode 100644 index 000000000000..310051fe9e47 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: false +Tag: cp39-cp39-macosx_10_15_x86_64 + diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info/top_level.txt rename to third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir.py b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir.py rename to third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/LICENSE deleted file mode 100644 index 353924be0e59..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Jason R. 
Coombs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/METADATA deleted file mode 100644 index f5588bb60c93..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/METADATA +++ /dev/null @@ -1,90 +0,0 @@ -Metadata-Version: 2.1 -Name: singledispatch -Version: 3.6.2 -Summary: Backport functools.singledispatch from Python 3.4 to Python 2.6-3.3. -Home-page: https://github.com/jaraco/singledispatch -Author: Jason R. Coombs -Author-email: jaraco@jaraco.com -License: UNKNOWN -Keywords: single,dispatch,generic,functions,singledispatch,genericfunctions,decorator,backport -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Requires-Python: >=2.6 -License-File: LICENSE -Requires-Dist: six -Requires-Dist: ordereddict ; python_version < "2.7" -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' -Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' -Provides-Extra: testing -Requires-Dist: pytest (>=4.6) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' -Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' -Requires-Dist: unittest2 ; (python_version < "2.7") and extra == 'testing' -Requires-Dist: pytest-checkdocs (>=2.4) ; (python_version > "3.6") and extra == 'testing' - -.. 
image:: https://img.shields.io/pypi/v/singledispatch.svg - :target: `PyPI link`_ - -.. image:: https://img.shields.io/pypi/pyversions/singledispatch.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/singledispatch - -.. image:: https://github.com/jaraco/singledispatch/workflows/tests/badge.svg - :target: https://github.com/jaraco/singledispatch/actions?query=workflow%3A%22tests%22 - :alt: tests - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :target: https://github.com/psf/black - :alt: Code style: Black - -.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest -.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest - -.. image:: https://img.shields.io/badge/skeleton-2021-informational - :target: https://blog.jaraco.com/skeleton - -`PEP 443 `_ proposed to expose -a mechanism in the ``functools`` standard library module in Python 3.4 -that provides a simple form of generic programming known as -single-dispatch generic functions. - -This library is a backport of this functionality and its evolution. - -Refer to the `upstream documentation -`_ -for API guidance. To use the backport, simply use -``from singledispatch import singledispatch`` in place of -``from functools import singledispatch``. - - - -Maintenance ------------ - -This backport is maintained on Github by Jason R. Coombs, one of the -members of the core CPython team: - -* `repository `_ - -* `issue tracker `_ - - diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/RECORD deleted file mode 100644 index d47012b4ee0b..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/RECORD +++ /dev/null @@ -1,7 +0,0 @@ -singledispatch/__init__.py,sha256=j7Jl4Ja9enGVqQ1nCuK9dQ47OCW802S_AOPLmI_oyI8,9166 -singledispatch/helpers.py,sha256=m0tA1Qy5s5djLPOlATJDBybx2hiMkXNcYCqqIs-bHHA,5572 -singledispatch-3.6.2.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -singledispatch-3.6.2.dist-info/METADATA,sha256=9mVzuE7bes4RL2DLaYR77q-_rWpj8mTF4AmXRpBKC3E,3583 -singledispatch-3.6.2.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 -singledispatch-3.6.2.dist-info/top_level.txt,sha256=t_1HQQSQKXhafBcgOJoZZ5y-hPzkVV0gIWP2WIjL7JA,15 -singledispatch-3.6.2.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/WHEEL deleted file mode 100644 index 01b8fc7d4a10..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/top_level.txt deleted file mode 100644 index ebb5ff79be83..000000000000 --- 
a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -singledispatch diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/__init__.py b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/__init__.py deleted file mode 100644 index 0ed3ca365297..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/__init__.py +++ /dev/null @@ -1,244 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -__all__ = ['singledispatch'] - -from functools import update_wrapper -from weakref import WeakKeyDictionary - -from .helpers import MappingProxyType, get_cache_token, get_type_hints - -################################################################################ -### singledispatch() - single-dispatch generic function decorator -################################################################################ - -def _c3_merge(sequences): - """Merges MROs in *sequences* to a single MRO using the C3 algorithm. - - Adapted from http://www.python.org/download/releases/2.3/mro/. - - """ - result = [] - while True: - sequences = [s for s in sequences if s] # purge empty sequences - if not sequences: - return result - for s1 in sequences: # find merge candidates among seq heads - candidate = s1[0] - for s2 in sequences: - if candidate in s2[1:]: - candidate = None - break # reject the current head, it appears later - else: - break - if candidate is None: - raise RuntimeError("Inconsistent hierarchy") - result.append(candidate) - # remove the chosen candidate - for seq in sequences: - if seq[0] == candidate: - del seq[0] - -def _c3_mro(cls, abcs=None): - """Computes the method resolution order using extended C3 linearization. - - If no *abcs* are given, the algorithm works exactly like the built-in C3 - linearization used for method resolution. - - If given, *abcs* is a list of abstract base classes that should be inserted - into the resulting MRO. Unrelated ABCs are ignored and don't end up in the - result. The algorithm inserts ABCs where their functionality is introduced, - i.e. issubclass(cls, abc) returns True for the class itself but returns - False for all its direct base classes. Implicit ABCs for a given class - (either registered or inferred from the presence of a special method like - __len__) are inserted directly after the last ABC explicitly listed in the - MRO of said class. If two implicit ABCs end up next to each other in the - resulting MRO, their ordering depends on the order of types in *abcs*. - - """ - for i, base in enumerate(reversed(cls.__bases__)): - if hasattr(base, '__abstractmethods__'): - boundary = len(cls.__bases__) - i - break # Bases up to the last explicit ABC are considered first. - else: - boundary = 0 - abcs = list(abcs) if abcs else [] - explicit_bases = list(cls.__bases__[:boundary]) - abstract_bases = [] - other_bases = list(cls.__bases__[boundary:]) - for base in abcs: - if issubclass(cls, base) and not any( - issubclass(b, base) for b in cls.__bases__ - ): - # If *cls* is the class that introduces behaviour described by - # an ABC *base*, insert said ABC to its MRO. 
- abstract_bases.append(base) - for base in abstract_bases: - abcs.remove(base) - explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases] - abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases] - other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases] - return _c3_merge( - [[cls]] + - explicit_c3_mros + abstract_c3_mros + other_c3_mros + - [explicit_bases] + [abstract_bases] + [other_bases] - ) - -def _compose_mro(cls, types): - """Calculates the method resolution order for a given class *cls*. - - Includes relevant abstract base classes (with their respective bases) from - the *types* iterable. Uses a modified C3 linearization algorithm. - - """ - bases = set(cls.__mro__) - # Remove entries which are already present in the __mro__ or unrelated. - def is_related(typ): - return (typ not in bases and hasattr(typ, '__mro__') - and issubclass(cls, typ)) - types = [n for n in types if is_related(n)] - # Remove entries which are strict bases of other entries (they will end up - # in the MRO anyway. - def is_strict_base(typ): - for other in types: - if typ != other and typ in other.__mro__: - return True - return False - types = [n for n in types if not is_strict_base(n)] - # Subclasses of the ABCs in *types* which are also implemented by - # *cls* can be used to stabilize ABC ordering. - type_set = set(types) - mro = [] - for typ in types: - found = [] - for sub in filter(_safe, typ.__subclasses__()): - if sub not in bases and issubclass(cls, sub): - found.append([s for s in sub.__mro__ if s in type_set]) - if not found: - mro.append(typ) - continue - # Favor subclasses with the biggest number of useful bases - found.sort(key=len, reverse=True) - for sub in found: - for subcls in sub: - if subcls not in mro: - mro.append(subcls) - return _c3_mro(cls, abcs=mro) - - -def _safe(class_): - """ - Return if the class is safe for testing as subclass. Ref #2. - """ - return not getattr(class_, '__origin__', None) - - -def _find_impl(cls, registry): - """Returns the best matching implementation from *registry* for type *cls*. - - Where there is no registered implementation for a specific type, its method - resolution order is used to find a more generic implementation. - - Note: if *registry* does not contain an implementation for the base - *object* type, this function may return None. - - """ - mro = _compose_mro(cls, registry.keys()) - match = None - for t in mro: - if match is not None: - # If *match* is an implicit ABC but there is another unrelated, - # equally matching implicit ABC, refuse the temptation to guess. - if (t in registry and t not in cls.__mro__ - and match not in cls.__mro__ - and not issubclass(match, t)): - raise RuntimeError("Ambiguous dispatch: {0} or {1}".format( - match, t)) - break - if t in registry: - match = t - return registry.get(match) - -def singledispatch(func): - """Single-dispatch generic function decorator. - - Transforms a function into a generic function, which can have different - behaviours depending upon the type of its first argument. The decorated - function acts as the default implementation, and additional - implementations can be registered using the register() attribute of the - generic function. - - """ - registry = {} - dispatch_cache = WeakKeyDictionary() - def ns(): pass - ns.cache_token = None - - def dispatch(cls): - """generic_func.dispatch(cls) -> - - Runs the dispatch algorithm to return the best available implementation - for the given *cls* registered on *generic_func*. 
- - """ - if ns.cache_token is not None: - current_token = get_cache_token() - if ns.cache_token != current_token: - dispatch_cache.clear() - ns.cache_token = current_token - try: - impl = dispatch_cache[cls] - except KeyError: - try: - impl = registry[cls] - except KeyError: - impl = _find_impl(cls, registry) - dispatch_cache[cls] = impl - return impl - - def register(cls, func=None): - """generic_func.register(cls, func) -> func - - Registers a new implementation for the given *cls* on a *generic_func*. - - """ - if func is None: - if isinstance(cls, type): - return lambda f: register(cls, f) - ann = getattr(cls, '__annotations__', {}) - if not ann: - raise TypeError( - "Invalid first argument to `register()`: {cls!r}. " - "Use either `@register(some_class)` or plain `@register` " - "on an annotated function.".format(**locals()) - ) - func = cls - - argname, cls = next(iter(get_type_hints(func).items())) - if not isinstance(cls, type): - raise TypeError( - "Invalid annotation for {argname!r}. " - "{cls!r} is not a class.".format(**locals()) - ) - registry[cls] = func - if ns.cache_token is None and hasattr(cls, '__abstractmethods__'): - ns.cache_token = get_cache_token() - dispatch_cache.clear() - return func - - def wrapper(*args, **kw): - return dispatch(args[0].__class__)(*args, **kw) - - registry[object] = func - wrapper.register = register - wrapper.dispatch = dispatch - wrapper.registry = MappingProxyType(registry) - wrapper._clear_cache = dispatch_cache.clear - update_wrapper(wrapper, func) - return wrapper - diff --git a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/helpers.py b/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/helpers.py deleted file mode 100644 index ca8d625a5350..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/helpers.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import sys -from abc import ABCMeta - -try: - from collections.abc import MutableMapping -except ImportError: - from collections import MutableMapping - -try: - from collections import UserDict -except ImportError: - from UserDict import UserDict -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict -try: - from thread import get_ident -except ImportError: - try: - from _thread import get_ident - except ImportError: - from _dummy_thread import get_ident - - -def recursive_repr(fillvalue='...'): - 'Decorator to make a repr function return fillvalue for a recursive call' - - def decorating_function(user_function): - repr_running = set() - - def wrapper(self): - key = id(self), get_ident() - if key in repr_running: - return fillvalue - repr_running.add(key) - try: - result = user_function(self) - finally: - repr_running.discard(key) - return result - - # Can't use functools.wraps() here because of bootstrap issues - wrapper.__module__ = getattr(user_function, '__module__') - wrapper.__doc__ = getattr(user_function, '__doc__') - wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) - return wrapper - - return decorating_function - - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) 
together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - accessed or updated using the *maps* attribute. There is no other state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. - - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - @recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self): # like Django's Context.push() - 'New ChainMap with a new dict followed by all previous maps.' - return self.__class__({}, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' - try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' 
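# --- Illustrative sketch, not part of the vendored singledispatch sources ---
# Usage note for the ChainMap backport above: lookups fall through the chained
# mappings in order, while writes, updates and deletions touch only maps[0].
# The stdlib class has the same semantics, so it stands in here to keep the
# sketch self-contained; the variable names are made up for illustration.
from collections import ChainMap  # behaves like the backported class above

defaults = {"colour": "red", "size": 10}
overrides = {"size": 12}
settings = ChainMap(overrides, defaults)
assert settings["size"] == 12         # found in the first mapping
assert settings["colour"] == "red"    # falls through to the second mapping
settings["colour"] = "blue"           # written to maps[0] (overrides) only
assert defaults["colour"] == "red"    # later mappings are never modified
# --- end of illustrative sketch ---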
- self.maps[0].clear() - - -class MappingProxyType(UserDict): - def __init__(self, data): - UserDict.__init__(self) - self.data = data - - -try: - from abc import get_cache_token -except ImportError: - def get_cache_token(): - return ABCMeta._abc_invalidation_counter - - -class Support(object): - def dummy(self): - pass - - def cpython_only(self, func): - if 'PyPy' in sys.version: - return self.dummy - return func - - -def get_type_hints(func): - # only import typing if annotation parsing is necessary - from typing import get_type_hints - return get_type_hints(func) or getattr(func, '__annotations__', {}) diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/LICENSE diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA similarity index 99% rename from third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA index 6d7525c2ebcf..869bf25a8843 100644 --- a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: six -Version: 1.16.0 +Version: 1.15.0 Summary: Python 2 and 3 compatibility utilities Home-page: https://github.com/benjaminp/six Author: Benjamin Peterson diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD new file mode 100644 index 000000000000..4cccdb4af604 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD @@ -0,0 +1,6 @@ +six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159 +six-1.15.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066 +six-1.15.0.dist-info/METADATA,sha256=W6rlyoeMZHXh6srP9NXNsm0rjAf_660re8WdH5TBT8E,1795 +six-1.15.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +six-1.15.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +six-1.15.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL new file mode 100644 index 000000000000..ef99c6cf3283 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/top_level.txt 
b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/top_level.txt rename to third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six.py b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py similarity index 98% rename from third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six.py rename to third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py index 4e15675d8b5c..83f69783d1a2 100644 --- a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six.py +++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.16.0" +__version__ = "1.15.0" # Useful for very coarse version differentiation. @@ -71,11 +71,6 @@ else: MAXSIZE = int((1 << 63) - 1) del X -if PY34: - from importlib.util import spec_from_loader -else: - spec_from_loader = None - def _add_doc(func, doc): """Add documentation to a function.""" @@ -191,11 +186,6 @@ class _SixMetaPathImporter(object): return self return None - def find_spec(self, fullname, path, target=None): - if fullname in self.known_modules: - return spec_from_loader(fullname, self) - return None - def __get_module(self, fullname): try: return self.known_modules[fullname] @@ -233,12 +223,6 @@ class _SixMetaPathImporter(object): return None get_source = get_code # same as get_code - def create_module(self, spec): - return self.load_module(spec.name) - - def exec_module(self, module): - pass - _importer = _SixMetaPathImporter(__name__) diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/RECORD deleted file mode 100644 index 8de4af79fae0..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/RECORD +++ /dev/null @@ -1,6 +0,0 @@ -six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 -six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066 -six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795 -six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 -six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 -six-1.16.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/WHEEL deleted file mode 100644 index 01b8fc7d4a10..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/RECORD deleted file mode 100644 
index 310ecd274cf9..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/RECORD +++ /dev/null @@ -1,6 +0,0 @@ -typing.py,sha256=yP2fxy8eprK-cHMe9bAcvU7QL7n_YGtoTFOG3bsWVJQ,84492 -typing-3.10.0.0.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 -typing-3.10.0.0.dist-info/METADATA,sha256=ZMrqFtMBF5truoFjyvYNNd44n84-wErUFcPTKMNrln8,2265 -typing-3.10.0.0.dist-info/WHEEL,sha256=bK8TJl-oUKFDa18qkB68zwTZhIBCifqi4qjS_NS4aFQ,92 -typing-3.10.0.0.dist-info/top_level.txt,sha256=oG8QCMTRcfcgGpEVbdwBU2DM8MthjmZSDaaQ6WWHx4o,7 -typing-3.10.0.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/LICENSE diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA new file mode 100644 index 000000000000..d98aa6599646 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA @@ -0,0 +1,41 @@ +Metadata-Version: 2.1 +Name: typing +Version: 3.7.4.1 +Summary: Type Hints for Python +Home-page: https://docs.python.org/3/library/typing.html +Author: Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Ivan Levkivskyi +Author-email: jukka.lehtosalo@iki.fi +License: PSF +Keywords: typing function annotations type hints hinting checking checker typehints typehinting typechecking backport +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Software Development + +Typing -- Type Hints for Python + +This is a backport of the standard library typing module to Python +versions older than 3.5. (See note below for newer versions.) + +Typing defines a standard notation for Python function and variable +type annotations. The notation can be used for documenting code in a +concise, standard format, and it has been designed to also be used by +static and runtime type checkers, static analyzers, IDEs and other +tools. + +NOTE: in Python 3.5 and later, the typing module lives in the stdlib, +and installing this package has NO EFFECT. To get a newer version of +the typing module in Python 3.5 or later, you have to upgrade to a +newer Python (bugfix) version. For example, typing in Python 3.6.0 is +missing the definition of 'Type' -- upgrading to 3.6.2 will fix this. + +Also note that most improvements to the typing module in Python 3.7 +will not be included in this package, since Python 3.7 has some +built-in support that is not present in older versions (See PEP 560.) 
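Illustrative sketch (not part of the vendored typing METADATA): because the backport exposes the same public names as the standard-library module, annotation code is written exactly as it would be on a modern Python; the function below is a hypothetical example, not taken from the package.

from typing import List, Optional

def first_or_none(items: List[int]) -> Optional[int]:
    # Works the same whether ``typing`` comes from the stdlib or this backport.
    return items[0] if items else None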
+ + diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD new file mode 100644 index 000000000000..921edbf40f7d --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD @@ -0,0 +1,6 @@ +typing.py,sha256=JfGga08eJ_AJ-n_EX5EHtDjNNI5h79rYSXucibO6yNg,80432 +typing-3.7.4.1.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 +typing-3.7.4.1.dist-info/METADATA,sha256=bDK323dZ06sy5ADWZkwBpgq6jS9nwECYjA2oysfGjeg,1798 +typing-3.7.4.1.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92 +typing-3.7.4.1.dist-info/top_level.txt,sha256=oG8QCMTRcfcgGpEVbdwBU2DM8MthjmZSDaaQ6WWHx4o,7 +typing-3.7.4.1.dist-info/RECORD,, diff --git a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL similarity index 65% rename from third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL index bff023edf224..3b5c4038dd7b 100644 --- a/third_party/python/taskcluster_urls/taskcluster_urls-11.0.0.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.31.1) +Generator: bdist_wheel (0.33.6) Root-Is-Purelib: true Tag: py3-none-any diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/top_level.txt rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py new file mode 100644 index 000000000000..62a677eee3b1 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py @@ -0,0 +1,2422 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import functools +import re as stdlib_re # Avoid confusion with the re we export. +import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. +if sys.version_info[:2] >= (3, 6): + import _collections_abc # Needed for private function _check_methods # noqa +try: + from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType +except ImportError: + WrapperDescriptorType = type(object.__init__) + MethodWrapperType = type(object().__str__) + MethodDescriptorType = type(str.join) + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. 
+ 'GenericMeta', # subclass of abc.ABCMeta and a metaclass + # for 'Generic' and ABCs below. + 'ByteString', + 'Container', + 'ContextManager', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + # Awaitable, + # AsyncIterator, + # AsyncIterable, + # Coroutine, + # Collection, + # AsyncGenerator, + # AsyncContextManager + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', + 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Counter', + 'Deque', + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'NoReturn', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +def _trim_name(nm): + whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') + if nm.startswith('_') and nm not in whitelist: + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta must pass _root=True. + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). + """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(metaclass=TypingMeta, _root=True): + """Internal indicator of special typing constructs.""" + + __slots__ = ('__weakref__',) + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("Cannot subclass %r" % cls) + return super().__new__(cls) + + # Things that are not classes also need these. 
+ def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase, _root=True): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. + """ + + __slots__ = () + + def __new__(cls, *args, _root=False, **kwds): + self = super().__new__(cls, *args, **kwds) + if _root is True: + return self + raise TypeError("Cannot instantiate %r" % cls) + + def __reduce__(self): + return _trim_name(type(self).__name__) + + +class _ForwardRef(_TypingBase, _root=True): + """Internal wrapper to hold a forward reference.""" + + __slots__ = ('__forward_arg__', '__forward_code__', + '__forward_evaluated__', '__forward_value__') + + def __init__(self, arg): + super().__init__(arg) + if not isinstance(arg, str): + raise TypeError('Forward reference must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('Forward reference must be an expression -- got %r' % + (arg,)) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + + def _eval_type(self, globalns, localns): + if not self.__forward_evaluated__ or localns is not globalns: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __eq__(self, other): + if not isinstance(other, _ForwardRef): + return NotImplemented + return (self.__forward_arg__ == other.__forward_arg__ and + self.__forward_value__ == other.__forward_value__) + + def __hash__(self): + return hash((self.__forward_arg__, self.__forward_value__)) + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Forward references cannot be used with issubclass().") + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias(_TypingBase, _root=True): + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It cannot + be used in instance and subclass checks in parameterized form, i.e. + ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning + ``False``. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. 
+ """ + assert isinstance(name, str), repr(name) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + assert isinstance(type_var, (type, _TypingBase)), repr(type_var) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__ and isinstance(parameter, type): + if not issubclass(parameter, self.type_var.__constraints__): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." % self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if ( + isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg) + ): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. are not valid as type arguments + if ( + type(arg).__name__ in ('_Union', '_Optional') and + not getattr(arg, '__origin__', None) or + isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol) + ): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). 
+ """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is ...: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class _Any(_FinalTypingBase, _root=True): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. + """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Any cannot be used with issubclass().") + + +Any = _Any(_root=True) + + +class _NoReturn(_FinalTypingBase, _root=True): + """Special type indicating functions that never return. + Example:: + + from typing import NoReturn + + def stop() -> NoReturn: + raise Exception('no way') + + This type is invalid in other positions, e.g., ``List[NoReturn]`` + will fail in static type checkers. + """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + +NoReturn = _NoReturn(_root=True) + + +class TypeVar(_TypingBase, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> List[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. + + Type variables defined with covariant=True or contravariant=True + can be used do declare covariant or contravariant generic types. + See PEP 484 for more details. By default generic types are invariant + in all type variables. + + Type variables can be introspected. 
e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + __slots__ = ('__name__', '__bound__', '__constraints__', + '__covariant__', '__contravariant__') + + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False): + super().__init__(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + self.__name__ = name + if covariant and contravariant: + raise ValueError("Bivariant types are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Type variables cannot be used with issubclass().") + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, str) + + +def _replace_arg(arg, tvars, args): + """An internal helper function: replace arg if it is a type variable + found in tvars with corresponding substitution from args or + with corresponding substitution sub-tree if arg is a generic type. + """ + + if tvars is None: + tvars = [] + if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): + return arg._subs_tree(tvars, args) + if isinstance(arg, TypeVar): + for i, tvar in enumerate(tvars): + if arg == tvar: + return args[i] + return arg + + +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union; +# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + +def _subs_tree(cls, tvars=None, args=None): + """An internal helper function: calculate substitution tree + for generic cls after replacing its type parameters with + substitutions in tvars -> args (if any). + Repeat the same following __origin__'s. 
+ + Return a list of arguments with all possible substitutions + performed. Arguments that are generic classes themselves are represented + as tuples (so that no new classes are created by this function). + For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] + """ + + if cls.__origin__ is None: + return cls + # Make of chain of origins (i.e. cls -> cls.__origin__) + current = cls.__origin__ + orig_chain = [] + while current.__origin__ is not None: + orig_chain.append(current) + current = current.__origin__ + # Replace type variables in __args__ if asked ... + tree_args = [] + for arg in cls.__args__: + tree_args.append(_replace_arg(arg, tvars, args)) + # ... then continue replacing down the origin chain. + for ocls in orig_chain: + new_tree_args = [] + for arg in ocls.__args__: + new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) + tree_args = new_tree_args + return tree_args + + +def _remove_dups_flatten(parameters): + """An internal helper for Union creation and substitution: flatten Union's + among parameters, then remove duplicates and strict subclasses. + """ + + # Flatten out Union[Union[...], ...]. + params = [] + for p in parameters: + if isinstance(p, _Union) and p.__origin__ is Union: + params.extend(p.__args__) + elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: + params.extend(p[1:]) + else: + params.append(p) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If object is present it will be sole survivor among proper classes. + # Never discard type variables. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if not isinstance(t1, type): + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} + if not (isinstance(t2, GenericMeta) and + t2.__origin__ is not None)): + all_params.remove(t1) + return tuple(t for t in params if t in all_params) + + +def _check_generic(cls, parameters): + # Check correct count for parameters of a generic cls (internal helper). + if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(cls), alen, elen)) + + +_cleanups = [] + + +def _tp_cache(func): + """Internal wrapper caching __getitem__ of generic types with a fallback to + original function for non-hashable arguments. + """ + + cached = functools.lru_cache()(func) + _cleanups.append(cached.cache_clear) + + @functools.wraps(func) + def inner(*args, **kwds): + try: + return cached(*args, **kwds) + except TypeError: + pass # All real errors (not unhashable args) are raised below. + return func(*args, **kwds) + return inner + + +class _Union(_FinalTypingBase, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). 
+ + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Similar for object:: + + Union[int, object] == object + + - You cannot subclass or instantiate a union. + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') + + def __new__(cls, parameters=None, origin=None, *args, _root=False): + self = super().__new__(cls, parameters, origin, *args, _root=_root) + if origin is None: + self.__parameters__ = None + self.__args__ = None + self.__origin__ = None + self.__tree_hash__ = hash(frozenset(('Union',))) + return self + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + if origin is Union: + parameters = _remove_dups_flatten(parameters) + # It's not a union if there's only one type left. + if len(parameters) == 1: + return parameters[0] + self.__parameters__ = _type_vars(parameters) + self.__args__ = parameters + self.__origin__ = origin + # Pre-calculate the __hash__ on instantiation. + # This improves speed for complex substitutions. + subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. + return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." 
+ parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if isinstance(other, _Union): + return self.__tree_hash__ == other.__tree_hash__ + elif self is not Union: + return self._subs_tree() == other + else: + return self is other + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class _Optional(_FinalTypingBase, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. + + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and c._gorg is Generic: + next_in_mro = cls.__mro__[i + 1] + return next_in_mro + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(subclass): + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in subclass.__mro__: + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(subclass): + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return __extrahook__ + + +def _no_slots_copy(dct): + """Internal helper: copy class __dict__ and clean slots class variables. + (They will be re-created if necessary by normal class machinery.) + """ + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types. + + This is a metaclass for typing.Generic and generic ABCs defined in + typing module. User defined subclasses of GenericMeta can override + __new__ and invoke super().__new__. Note that GenericMeta.__new__ + has strict rules on what is allowed in its bases argument: + * plain Generic is disallowed in bases; + * Generic[...] should appear in bases at most once; + * if Generic[...] is present, then it should list all type variables + that appear in other bases. 
+ In addition, type of all generic bases is erased, e.g., C[int] is + stripped to plain C. + """ + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + """Create a new generic class. GenericMeta.__new__ accepts + keyword arguments that are used for internal bookkeeping, therefore + an override should pass unused keyword arguments to super(). + """ + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + namespace.update({'__origin__': origin, '__extra__': extra, + '_gorg': None if not origin else origin._gorg}) + self = super().__new__(cls, name, bases, namespace, _root=True) + super(GenericMeta, self).__setattr__('_gorg', + self if not origin else origin._gorg) + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(... if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ( + '__subclasshook__' not in namespace and extra or + # allow overriding + getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' + ): + self.__subclasshook__ = _make_subclasshook(self) + if isinstance(extra, abc.ABCMeta): + self._abc_registry = extra._abc_registry + self._abc_cache = extra._abc_cache + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. 
+ self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + # _abc_negative_cache and _abc_negative_cache_version + # realised as descriptors, since GenClass[t1, t2, ...] always + # share subclass info with GenClass. + # This is an important memory optimization. + @property + def _abc_negative_cache(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache + return self._gorg._abc_generic_negative_cache + + @_abc_negative_cache.setter + def _abc_negative_cache(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache = value + else: + self._abc_generic_negative_cache = value + + @property + def _abc_negative_cache_version(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache_version + return self._gorg._abc_generic_negative_cache_version + + @_abc_negative_cache_version.setter + def _abc_negative_cache_version(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache_version = value + else: + self._abc_generic_negative_cache_version = value + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (self._gorg,) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and self._gorg is not Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] 
must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. + raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if self is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % self) + return super().__subclasscheck__(cls) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + return issubclass(instance.__class__, self) + + def __setattr__(self, attr, value): + # We consider all the subscripted generics as proxies for original class + if ( + attr.startswith('__') and attr.endswith('__') or + attr.startswith('_abc_') or + self._gorg is None # The class is not fully created, see #typing/506 + ): + super(GenericMeta, self).__setattr__(attr, value) + else: + super(GenericMeta, self._gorg).__setattr__(attr, value) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + if (base_cls.__new__ is object.__new__ and + cls.__init__ is not object.__init__): + return base_cls.__new__(cls) + else: + return base_cls.__new__(cls, *args, **kwds) + else: + origin = cls._gorg + if (base_cls.__new__ is object.__new__ and + cls.__init__ is not object.__init__): + obj = base_cls.__new__(origin) + else: + obj = base_cls.__new__(origin, *args, **kwds) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. 
+ + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Generic: + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty: + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis: + """Internal placeholder for ... (ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or self._gorg is not Tuple: + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super().__getitem__(parameters) + if parameters == (): + return super().__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is ...: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super().__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return super().__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ is None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ is None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +class Tuple(tuple, extra=tuple, metaclass=TupleMeta): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Tuple: + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """Metaclass for Callable (internal).""" + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if self._gorg is not Callable: + return super()._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super()._tree_repr() for nice formatting. + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. 
+ """ + + if self.__origin__ is not None or self._gorg is not Callable: + return super().__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return super().__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super().__getitem__(parameters) + + +class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Callable: + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _ClassVar(_FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = _eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. 
To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + try: + code = func.__code__ + except AttributeError: + # Some built-in functions don't have __code__, __defaults__, etc. + return {} + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +_allowed_types = (types.FunctionType, types.BuiltinFunctionType, + types.MethodType, types.ModuleType, + WrapperDescriptorType, MethodWrapperType, MethodDescriptorType) + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + + if getattr(obj, '__no_type_check__', None): + return {} + # Classes require a special treatment. + if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + if globalns is None: + base_globals = sys.modules[base.__module__].__dict__ + else: + base_globals = globalns + ann = base.__dict__.get('__annotations__', {}) + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, base_globals, localns) + hints[name] = value + return hints + + if globalns is None: + if isinstance(obj, types.ModuleType): + globalns = obj.__dict__ + else: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. 
+ if isinstance(obj, _allowed_types): + return {} + else: + raise TypeError('{!r} is not a module, class, method, ' + 'or function.'.format(obj)) + defaults = _get_defaults(obj) + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. + """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__ + (arg,): + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super().__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. 
+ attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '__annotations__' and + attr != '__weakref__' and + attr != '_is_protocol' and + attr != '_gorg' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. + + +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + __all__.append('Awaitable') + + +if hasattr(collections_abc, 'Coroutine'): + class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + __all__.append('Coroutine') + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + __all__.append('AsyncIterable') + __all__.append('AsyncIterator') + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsIndex(_Protocol): + __slots__ = () + + @abstractmethod + def __index__(self) -> int: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): + __slots__ = () +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def 
__reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +if hasattr(collections_abc, 'Collection'): + class Collection(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Collection): + __slots__ = () + + __all__.append('Collection') + + +# Callable was defined earlier. + +if hasattr(collections_abc, 'Collection'): + class AbstractSet(Collection[T_co], + extra=collections_abc.Set): + __slots__ = () +else: + class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + __slots__ = () + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + __slots__ = () + + +# NOTE: It is only covariant in the value type. +if hasattr(collections_abc, 'Collection'): + class Mapping(Collection[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () +else: + class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + __slots__ = () + + +if hasattr(collections_abc, 'Reversible'): + if hasattr(collections_abc, 'Collection'): + class Sequence(Reversible[T_co], Collection[T_co], + extra=collections_abc.Sequence): + __slots__ = () + else: + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () +else: + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + __slots__ = () + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + __slots__ = () + + +class List(list, MutableSequence[T], extra=list): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is List: + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return _generic_new(list, cls, *args, **kwds) + + +class Deque(collections.deque, MutableSequence[T], extra=collections.deque): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Deque: + return collections.deque(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + +class Set(set, MutableSet[T], extra=set): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Set: + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return _generic_new(set, cls, *args, **kwds) + + +class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is FrozenSet: + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return _generic_new(frozenset, cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + __slots__ = () + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + __slots__ = () + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co], + extra=collections_abc.ItemsView): + __slots__ = () + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + __slots__ = () + + +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () +else: + class ContextManager(Generic[T_co]): + __slots__ = () + + def 
__enter__(self): + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is ContextManager: + # In Python 3.6+, it is possible to set a method to None to + # explicitly indicate that the class does not implement an ABC + # (https://bugs.python.org/issue25958), but we do not support + # that pattern here because this fallback class is only used + # in Python 3.5 and earlier. + if (any("__enter__" in B.__dict__ for B in C.__mro__) and + any("__exit__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +if hasattr(contextlib, 'AbstractAsyncContextManager'): + class AsyncContextManager(Generic[T_co], + extra=contextlib.AbstractAsyncContextManager): + __slots__ = () + + __all__.append('AsyncContextManager') +elif sys.version_info[:2] >= (3, 5): + exec(""" +class AsyncContextManager(Generic[T_co]): + __slots__ = () + + async def __aenter__(self): + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncContextManager: + if sys.version_info[:2] >= (3, 6): + return _collections_abc._check_methods(C, "__aenter__", "__aexit__") + if (any("__aenter__" in B.__dict__ for B in C.__mro__) and + any("__aexit__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + +__all__.append('AsyncContextManager') +""") + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Dict: + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return _generic_new(dict, cls, *args, **kwds) + + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is DefaultDict: + return collections.defaultdict(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + +class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Counter: + return collections.Counter(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + +if hasattr(collections, 'ChainMap'): + # ChainMap only exists in 3.3+ + __all__.append('ChainMap') + + class ChainMap(collections.ChainMap, MutableMapping[KT, VT], + extra=collections.ChainMap): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is ChainMap: + return collections.ChainMap(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. 
+ _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Generator: + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +if hasattr(collections_abc, 'AsyncGenerator'): + class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], + extra=collections_abc.AsyncGenerator): + __slots__ = () + + __all__.append('AsyncGenerator') + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + __slots__ = () + + +def _make_nmtuple(name, types): + msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" + types = [(n, _type_check(t, msg)) for n, t in types] + nm_tpl = collections.namedtuple(name, [n for n, t in types]) + # Prior to PEP 526, only _field_types attribute was assigned. + # Now, both __annotations__ and _field_types are used to maintain compatibility. 
+ nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) + try: + nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return nm_tpl + + +_PY36 = sys.version_info[:2] >= (3, 6) + +# attributes prohibited to set in NamedTuple class syntax +_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', + '_fields', '_field_defaults', '_field_types', + '_make', '_replace', '_asdict', '_source') + +_special = ('__module__', '__name__', '__qualname__', '__annotations__') + + +class NamedTupleMeta(type): + + def __new__(cls, typename, bases, ns): + if ns.get('_root', False): + return super().__new__(cls, typename, bases, ns) + if not _PY36: + raise TypeError("Class syntax for NamedTuple is only supported" + " in Python 3.6+") + types = ns.get('__annotations__', {}) + nm_tpl = _make_nmtuple(typename, types.items()) + defaults = [] + defaults_dict = {} + for field_name in types: + if field_name in ns: + default_value = ns[field_name] + defaults.append(default_value) + defaults_dict[field_name] = default_value + elif defaults: + raise TypeError("Non-default namedtuple field {field_name} cannot " + "follow default field(s) {default_names}" + .format(field_name=field_name, + default_names=', '.join(defaults_dict.keys()))) + nm_tpl.__new__.__annotations__ = collections.OrderedDict(types) + nm_tpl.__new__.__defaults__ = tuple(defaults) + nm_tpl._field_defaults = defaults_dict + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + return nm_tpl + + +class NamedTuple(metaclass=NamedTupleMeta): + """Typed version of namedtuple. + + Usage in Python versions >= 3.6:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has extra __annotations__ and _field_types + attributes, giving an ordered dict mapping field names to types. + __annotations__ should be preferred, while _field_types + is kept to maintain pre PEP 526 compatibility. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) Alternative equivalent keyword syntax is also accepted:: + + Employee = NamedTuple('Employee', name=str, id=int) + + In Python versions <= 3.5 use:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + _root = True + + def __new__(self, typename, fields=None, **kwargs): + if kwargs and not _PY36: + raise TypeError("Keyword syntax for NamedTuple is only supported" + " in Python 3.6+") + if fields is None: + fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(typename, fields) + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... 
+ + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = str + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractproperty + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> Optional[str]: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + + 
+re.__name__ = __name__ + '.re' +sys.modules[re.__name__] = re diff --git a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/LICENSE diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA similarity index 96% rename from third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA index 30047d3e60e5..d6853448925a 100644 --- a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: typing -Version: 3.10.0.0 +Version: 3.7.4.3 Summary: Type Hints for Python Home-page: https://docs.python.org/3/library/typing.html Author: Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Ivan Levkivskyi @@ -17,7 +17,7 @@ Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3.4 Classifier: Topic :: Software Development -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <3.5 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* Typing -- Type Hints for Python diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD new file mode 100644 index 000000000000..3d7934223221 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD @@ -0,0 +1,6 @@ +typing.py,sha256=yP2fxy8eprK-cHMe9bAcvU7QL7n_YGtoTFOG3bsWVJQ,84492 +typing-3.7.4.3.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 +typing-3.7.4.3.dist-info/METADATA,sha256=t3uvms3cJatf6uhsaHM3PP7HWbkjVUh4AE9tb8xCSsQ,2258 +typing-3.7.4.3.dist-info/WHEEL,sha256=CbUdLTqD3-4zWemf83rgR_2_MC4TeXw9qXwrXte5w4w,92 +typing-3.7.4.3.dist-info/top_level.txt,sha256=oG8QCMTRcfcgGpEVbdwBU2DM8MthjmZSDaaQ6WWHx4o,7 +typing-3.7.4.3.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL similarity index 65% rename from third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL index 60b427d61f9d..2b098df983d5 100644 --- a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info/WHEEL +++ 
b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) +Generator: bdist_wheel (0.34.2) Root-Is-Purelib: true Tag: py2-none-any diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt new file mode 100644 index 000000000000..c997f364b496 --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt @@ -0,0 +1 @@ +typing diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing.py b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/typing-3.10.0.0-py2-none-any/typing.py rename to third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py diff --git a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/METADATA deleted file mode 100644 index fa314015ef78..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/METADATA +++ /dev/null @@ -1,45 +0,0 @@ -Metadata-Version: 2.1 -Name: typing-extensions -Version: 3.10.0.0 -Summary: Backported and Experimental Type Hints for Python 3.5+ -Home-page: https://github.com/python/typing/blob/master/typing_extensions/README.rst -Author: Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee -Author-email: levkivskyi@gmail.com -License: PSF -Keywords: typing function annotations type hints hinting checking checker typehints typehinting typechecking backport -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: Software Development -Requires-Dist: typing (>=3.7.4) ; python_version < "3.5" - -Typing Extensions -- Backported and Experimental Type Hints for Python - -The ``typing`` module was added to the standard library in Python 3.5, but -many new features have been added to the module since then. -This means users of Python 3.5 - 3.6 who are unable to upgrade will not be -able to take advantage of new types added to the ``typing`` module, such as -``typing.Protocol`` or ``typing.TypedDict``. - -The ``typing_extensions`` module contains backports of these changes. -Experimental types that will eventually be added to the ``typing`` -module are also included in ``typing_extensions``, such as -``typing.ParamSpec`` and ``typing.TypeGuard``. 
- -Users of Python versions before 3.5 should install and use -the ``typing`` module from PyPI instead of using this one, unless specifically -writing code that must be compatible with multiple Python versions or requires -experimental types. - - diff --git a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/RECORD deleted file mode 100644 index 217df4acdde9..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/RECORD +++ /dev/null @@ -1,6 +0,0 @@ -typing_extensions.py,sha256=upcRc-ygmoZSgbJ4WZa34ZE_PVJsYrOlGM7WWbBrJuo,108429 -typing_extensions-3.10.0.0.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 -typing_extensions-3.10.0.0.dist-info/METADATA,sha256=zjlcNCeUQUETPe37jftee4IwkGKxm8YPKQxFFOMgyqQ,2099 -typing_extensions-3.10.0.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 -typing_extensions-3.10.0.0.dist-info/top_level.txt,sha256=hkDmk3VmrfXPOD--jS4aKTCu6kFZo-kVT1cIFfq1eU8,18 -typing_extensions-3.10.0.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/WHEEL deleted file mode 100644 index 385faab0525c..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/top_level.txt deleted file mode 100644 index 5fd4f05f341a..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -typing_extensions diff --git a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions.py b/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions.py deleted file mode 100644 index 82d1c2dc2c40..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions.py +++ /dev/null @@ -1,2805 +0,0 @@ -import abc -import collections -import contextlib -import sys -import typing -import collections.abc as collections_abc -import operator - -# These are used by Protocol implementation -# We use internal typing helpers here, but this significantly reduces -# code duplication. (Also this is only until Protocol is in typing.) -from typing import Generic, Callable, TypeVar, Tuple - -# After PEP 560, internal typing API was substantially reworked. -# This is especially important for Protocol class which uses internal APIs -# quite extensivelly. 
-PEP_560 = sys.version_info[:3] >= (3, 7, 0) - -if PEP_560: - GenericMeta = TypingMeta = type -else: - from typing import GenericMeta, TypingMeta -OLD_GENERICS = False -try: - from typing import _type_vars, _next_in_mro, _type_check -except ImportError: - OLD_GENERICS = True -try: - from typing import _subs_tree # noqa - SUBS_TREE = True -except ImportError: - SUBS_TREE = False -try: - from typing import _tp_cache -except ImportError: - def _tp_cache(x): - return x -try: - from typing import _TypingEllipsis, _TypingEmpty -except ImportError: - class _TypingEllipsis: - pass - - class _TypingEmpty: - pass - - -# The two functions below are copies of typing internal helpers. -# They are needed by _ProtocolMeta - - -def _no_slots_copy(dct): - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -def _check_generic(cls, parameters): - if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -if hasattr(typing, '_generic_new'): - _generic_new = typing._generic_new -else: - # Note: The '_generic_new(...)' function is used as a part of the - # process of creating a generic type and was added to the typing module - # as of Python 3.5.3. - # - # We've defined '_generic_new(...)' below to exactly match the behavior - # implemented in older versions of 'typing' bundled with Python 3.5.0 to - # 3.5.2. This helps eliminate redundancy when defining collection types - # like 'Deque' later. - # - # See https://github.com/python/typing/pull/308 for more details -- in - # particular, compare and contrast the definition of types like - # 'typing.List' before and after the merge. - - def _generic_new(base_cls, cls, *args, **kwargs): - return base_cls.__new__(cls, *args, **kwargs) - -# See https://github.com/python/typing/pull/439 -if hasattr(typing, '_geqv'): - from typing import _geqv - _geqv_defined = True -else: - _geqv = None - _geqv_defined = False - -if sys.version_info[:2] >= (3, 6): - import _collections_abc - _check_methods_in_mro = _collections_abc._check_methods -else: - def _check_methods_in_mro(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'ClassVar', - 'Concatenate', - 'Final', - 'ParamSpec', - 'Type', - - # ABCs (from collections.abc). - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # 'Awaitable', - # 'AsyncIterator', - # 'AsyncIterable', - # 'Coroutine', - # 'AsyncGenerator', - # 'AsyncContextManager', - # 'ChainMap', - - # Concrete collection types. - 'ContextManager', - 'Counter', - 'Deque', - 'DefaultDict', - 'OrderedDict' - 'TypedDict', - - # Structural checks, a.k.a. protocols. - 'SupportsIndex', - - # One-off things. - 'final', - 'IntVar', - 'Literal', - 'NewType', - 'overload', - 'Text', - 'TypeAlias', - 'TypeGuard', - 'TYPE_CHECKING', -] - -# Annotated relies on substitution trees of pep 560. 
It will not work for -# versions of typing older than 3.5.3 -HAVE_ANNOTATED = PEP_560 or SUBS_TREE - -if PEP_560: - __all__.extend(["get_args", "get_origin", "get_type_hints"]) - -if HAVE_ANNOTATED: - __all__.append("Annotated") - -# Protocols are hard to backport to the original version of typing 3.5.0 -HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0) - -if HAVE_PROTOCOLS: - __all__.extend(['Protocol', 'runtime', 'runtime_checkable']) - - -# TODO -if hasattr(typing, 'NoReturn'): - NoReturn = typing.NoReturn -elif hasattr(typing, '_FinalTypingBase'): - class _NoReturn(typing._FinalTypingBase, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - NoReturn = _NoReturn(_root=True) -else: - class _NoReturnMeta(typing.TypingMeta): - """Metaclass for NoReturn""" - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - __slots__ = () - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = typing.TypeVar('T') # Any type. -KT = typing.TypeVar('KT') # Key type. -VT = typing.TypeVar('VT') # Value type. -T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. - - -if hasattr(typing, 'ClassVar'): - ClassVar = typing.ClassVar -elif hasattr(typing, '_FinalTypingBase'): - class _ClassVar(typing._FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - ClassVar = _ClassVar(_root=True) -else: - class _ClassVarMeta(typing.TypingMeta): - """Metaclass for ClassVar""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("ClassVar cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("ClassVar cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __type__ = None - -# On older versions of typing there is an internal class named "Final". -if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): - Final = typing.Final -elif sys.version_info[:2] >= (3, 7): - class _FinalForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' 
+ self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, - '{} accepts only single type'.format(self._name)) - return _GenericAlias(self, (item,)) - - Final = _FinalForm('Final', - doc="""A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties.""") -elif hasattr(typing, '_FinalTypingBase'): - class _Final(typing._FinalTypingBase, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - Final = _Final(_root=True) -else: - class _FinalMeta(typing.TypingMeta): - """Metaclass for Final""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("Final cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Final cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - 
return self is other - - class Final(typing.Final, metaclass=_FinalMeta, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __type__ = None - - -if hasattr(typing, 'final'): - final = typing.final -else: - def final(f): - """This decorator can be used to indicate to type checkers that - the decorated method cannot be overridden, and decorated class - cannot be subclassed. For example: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. - """ - return f - - -def IntVar(name): - return TypeVar(name) - - -if hasattr(typing, 'Literal'): - Literal = typing.Literal -elif sys.version_info[:2] >= (3, 7): - class _LiteralForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - return _GenericAlias(self, parameters) - - Literal = _LiteralForm('Literal', - doc="""A type that can be used to indicate to type checkers - that the corresponding value has a value literally equivalent - to the provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to - the value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime - checking verifying that the parameter is actually a value - instead of a type.""") -elif hasattr(typing, '_FinalTypingBase'): - class _Literal(typing._FinalTypingBase, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. 
- """ - - __slots__ = ('__values__',) - - def __init__(self, values=None, **kwds): - self.__values__ = values - - def __getitem__(self, values): - cls = type(self) - if self.__values__ is None: - if not isinstance(values, tuple): - values = (values,) - return cls(values, _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, _Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - - Literal = _Literal(_root=True) -else: - class _LiteralMeta(typing.TypingMeta): - """Metaclass for Literal""" - - def __new__(cls, name, bases, namespace, values=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if values is not None: - self.__values__ = values - return self - - def __instancecheck__(self, obj): - raise TypeError("Literal cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Literal cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__values__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - if not isinstance(item, tuple): - item = (item,) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), values=item, _root=True) - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - - class Literal(typing.Final, metaclass=_LiteralMeta, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - - __values__ = None - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. 
The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -# This is not a real generic class. Don't use outside annotations. -if hasattr(typing, 'Type'): - Type = typing.Type -else: - # Internal type variable used for Type[]. - CT_co = typing.TypeVar('CT_co', covariant=True, bound=type) - - class Type(typing.Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -def _define_guard(type_name): - """ - Returns True if the given type isn't defined in typing but - is defined in collections_abc. - - Adds the type to __all__ if the collection is found in either - typing or collection_abc. - """ - if hasattr(typing, type_name): - __all__.append(type_name) - globals()[type_name] = getattr(typing, type_name) - return False - elif hasattr(collections_abc, type_name): - __all__.append(type_name) - return True - else: - return False - - -class _ExtensionsGenericMeta(GenericMeta): - def __subclasscheck__(self, subclass): - """This mimics a more modern GenericMeta.__subclasscheck__() logic - (that does not have problems with recursion) to work around interactions - between collections, typing, and typing_extensions on older - versions of Python, see https://github.com/python/typing/issues/501. 
- """ - if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if not self.__extra__: - return super().__subclasscheck__(subclass) - res = self.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if self.__extra__ in subclass.__mro__: - return True - for scls in self.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return False - - -if _define_guard('Awaitable'): - class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Awaitable): - __slots__ = () - - -if _define_guard('Coroutine'): - class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Coroutine): - __slots__ = () - - -if _define_guard('AsyncIterable'): - class AsyncIterable(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncIterable): - __slots__ = () - - -if _define_guard('AsyncIterator'): - class AsyncIterator(AsyncIterable[T_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncIterator): - __slots__ = () - - -if hasattr(typing, 'Deque'): - Deque = typing.Deque -elif _geqv_defined: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) -else: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Deque: - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - - -if hasattr(typing, 'ContextManager'): - ContextManager = typing.ContextManager -elif hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractContextManager): - __slots__ = () -else: - class ContextManager(typing.Generic[T_co]): - __slots__ = () - - def __enter__(self): - return self - - @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is ContextManager: - # In Python 3.6+, it is possible to set a method to None to - # explicitly indicate that the class does not implement an ABC - # (https://bugs.python.org/issue25958), but we do not support - # that pattern here because this fallback class is only used - # in Python 3.5 and earlier. 
- if (any("__enter__" in B.__dict__ for B in C.__mro__) and - any("__exit__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - - -if hasattr(typing, 'AsyncContextManager'): - AsyncContextManager = typing.AsyncContextManager - __all__.append('AsyncContextManager') -elif hasattr(contextlib, 'AbstractAsyncContextManager'): - class AsyncContextManager(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractAsyncContextManager): - __slots__ = () - - __all__.append('AsyncContextManager') -elif sys.version_info[:2] >= (3, 5): - exec(""" -class AsyncContextManager(typing.Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - return _check_methods_in_mro(C, "__aenter__", "__aexit__") - return NotImplemented - -__all__.append('AsyncContextManager') -""") - - -if hasattr(typing, 'DefaultDict'): - DefaultDict = typing.DefaultDict -elif _geqv_defined: - class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) -else: - class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is DefaultDict: - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - -if hasattr(typing, 'OrderedDict'): - OrderedDict = typing.OrderedDict -elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): - OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) -elif _geqv_defined: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, OrderedDict): - return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) -else: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is OrderedDict: - return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) - - -if hasattr(typing, 'Counter'): - Counter = typing.Counter -elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1): - assert _geqv_defined - _TInt = typing.TypeVar('_TInt') - - class _CounterMeta(typing.GenericMeta): - """Metaclass for Counter""" - def __getitem__(self, item): - return super().__getitem__((item, int)) - - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_CounterMeta, - extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - -elif _geqv_defined: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, 
**kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - -else: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Counter: - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - - -if hasattr(typing, 'ChainMap'): - ChainMap = typing.ChainMap - __all__.append('ChainMap') -elif hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - if _geqv_defined: - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, ChainMap): - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - else: - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - __all__.append('ChainMap') - - -if _define_guard('AsyncGenerator'): - class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncGenerator): - __slots__ = () - - -if hasattr(typing, 'NewType'): - NewType = typing.NewType -else: - def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -if hasattr(typing, 'Text'): - Text = typing.Text -else: - Text = str - - -if hasattr(typing, 'TYPE_CHECKING'): - TYPE_CHECKING = typing.TYPE_CHECKING -else: - # Constant that's True when type checking, but False here. 
- TYPE_CHECKING = False - - -def _gorg(cls): - """This function exists for compatibility with old typing versions.""" - assert isinstance(cls, GenericMeta) - if hasattr(cls, '_gorg'): - return cls._gorg - while cls.__origin__ is not None: - cls = cls.__origin__ - return cls - - -if OLD_GENERICS: - def _next_in_mro(cls): # noqa - """This function exists for compatibility with old typing versions.""" - next_in_mro = object - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - -_PROTO_WHITELIST = ['Callable', 'Awaitable', - 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', - 'ContextManager', 'AsyncContextManager'] - - -def _get_protocol_attrs(cls): - attrs = set() - for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): - continue - annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if (not attr.startswith('_abc_') and attr not in ( - '__abstractmethods__', '__annotations__', '__weakref__', - '_is_protocol', '_is_runtime_protocol', '__dict__', - '__args__', '__slots__', - '__next_in_mro__', '__parameters__', '__origin__', - '__orig_bases__', '__extra__', '__tree_hash__', - '__doc__', '__subclasshook__', '__init__', '__new__', - '__module__', '_MutableMapping__marker', '_gorg')): - attrs.add(attr) - return attrs - - -def _is_callable_members_only(cls): - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - - -if hasattr(typing, 'Protocol'): - Protocol = typing.Protocol -elif HAVE_PROTOCOLS and not PEP_560: - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(GenericMeta): - """Internal metaclass for Protocol. - - This exists so Protocol classes can be generic without deriving - from Generic. - """ - if not OLD_GENERICS: - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - # This is just a version copied from GenericMeta.__new__ that - # includes "Protocol" special treatment. (Comments removed for brevity.) - assert extra is None # Protocols should not have extra - if tvars is not None: - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - tvars = _type_vars(bases) - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ in (Generic, Protocol)): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] or" - " Protocol[...] 
multiple times.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in %s[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - "Generic" if any(b.__origin__ is Generic - for b in bases) else "Protocol", - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if (extra is not None and type(extra) is abc.ABCMeta and - extra not in bases): - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b - for b in bases) - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, - _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else - _gorg(origin)) - self.__parameters__ = tvars - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - self.__next_in_mro__ = _next_in_mro(self) - if orig_bases is None: - self.__orig_bases__ = initial_bases - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - def __init__(cls, *args, **kwargs): - super().__init__(*args, **kwargs) - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol or - isinstance(b, _ProtocolMeta) and - b.__origin__ is Protocol - for b in cls.__bases__) - if cls._is_protocol: - for base in cls.__mro__[1:]: - if not (base in (object, Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, TypingMeta) and base._is_protocol or - isinstance(base, GenericMeta) and - base.__origin__ is Generic): - raise TypeError('Protocols can only inherit from other' - ' protocols, got %r' % base) - - cls.__init__ = _no_init - - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - def __instancecheck__(self, instance): - # We need this method for situations where attributes are - # assigned in __init__. 
- if ((not getattr(self, '_is_protocol', False) or - _is_callable_members_only(self)) and - issubclass(instance.__class__, self)): - return True - if self._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(self, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(self)): - return True - return super(GenericMeta, self).__instancecheck__(instance) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if (self.__dict__.get('_is_protocol', None) and - not self.__dict__.get('_is_runtime_protocol', None)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return False - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if (self.__dict__.get('_is_runtime_protocol', None) and - not _is_callable_members_only(self)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return super(GenericMeta, self).__subclasscheck__(cls) - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - return super(GenericMeta, self).__subclasscheck__(cls) - - if not OLD_GENERICS: - @_tp_cache - def __getitem__(self, params): - # We also need to copy this from GenericMeta.__getitem__ to get - # special treatment of "Protocol". (Comments removed for brevity.) - if not isinstance(params, tuple): - params = (params,) - if not params and _gorg(self) is not Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % self.__qualname__) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self in (Generic, Protocol): - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to %r[...] must all be type variables" % self) - if len(set(params)) != len(params): - raise TypeError( - "Parameters to %r[...] must all be unique" % self) - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self.__origin__ in (Generic, Protocol): - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - class Protocol(metaclass=_ProtocolMeta): - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto({bases}): - def meth(self) -> T: - ... 
- """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if _gorg(cls) is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can be used only as a base class") - if OLD_GENERICS: - return _generic_new(_next_in_mro(cls), cls, *args, **kwds) - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - if Protocol.__doc__ is not None: - Protocol.__doc__ = Protocol.__doc__.format(bases="Protocol, Generic[T]" if - OLD_GENERICS else "Protocol[T]") - - -elif PEP_560: - from typing import _type_check, _GenericAlias, _collect_type_vars # noqa - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(abc.ABCMeta): - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. - def __instancecheck__(cls, instance): - # We need this method for situations where attributes are - # assigned in __init__. - if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): - return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): - return True - return super().__instancecheck__(instance) - - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") - return super().__new__(cls) - - @_tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not Tuple: - raise TypeError( - "Parameter list to {}[...] cannot be empty".format(cls.__qualname__)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - i = 0 - while isinstance(params[i], TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - " Parameter {} is {}".format(i + 1, params[i])) - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. 
- _check_generic(cls, params) - return _GenericAlias(cls, params) - - def __init_subclass__(cls, *args, **kwargs): - tvars = [] - if '__orig_bases__' in cls.__dict__: - error = Generic in cls.__orig_bases__ - else: - error = Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - if '__orig_bases__' in cls.__dict__: - tvars = _collect_type_vars(cls.__orig_bases__) - # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...] and/or Protocol[...]. - gvars = None - for base in cls.__orig_bases__: - if (isinstance(base, _GenericAlias) and - base.__origin__ in (Generic, Protocol)): - # for error messages - the_base = 'Generic' if base.__origin__ is Generic else 'Protocol' - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...]" - " and/or Protocol[...] multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) - s_args = ', '.join(str(g) for g in gvars) - raise TypeError("Some type variables ({}) are" - " not listed in {}[{}]".format(s_vars, - the_base, s_args)) - tvars = gvars - cls.__parameters__ = tuple(tvars) - - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) - - # Set (or override) the protocol subclass hook. - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return - - # Check consistency of bases. - for base in cls.__bases__: - if not (base in (object, Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - ' protocols, got %r' % base) - cls.__init__ = _no_init - - -if hasattr(typing, 'runtime_checkable'): - runtime_checkable = typing.runtime_checkable -elif HAVE_PROTOCOLS: - def runtime_checkable(cls): - """Mark a protocol class as a runtime protocol, so that it - can be used with isinstance() and issubclass(). 
Raise TypeError - if applied to a non-protocol class. - - This allows a simple-minded structural check very similar to the - one-offs in collections.abc such as Hashable. - """ - if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: - raise TypeError('@runtime_checkable can be only applied to protocol classes,' - ' got %r' % cls) - cls._is_runtime_protocol = True - return cls - - -if HAVE_PROTOCOLS: - # Exists for backwards compatibility. - runtime = runtime_checkable - - -if hasattr(typing, 'SupportsIndex'): - SupportsIndex = typing.SupportsIndex -elif HAVE_PROTOCOLS: - @runtime_checkable - class SupportsIndex(Protocol): - __slots__ = () - - @abc.abstractmethod - def __index__(self) -> int: - pass - - -if sys.version_info >= (3, 9, 2): - # The standard library TypedDict in Python 3.8 does not store runtime information - # about which (if any) keys are optional. See https://bugs.python.org/issue38834 - # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" - # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 - TypedDict = typing.TypedDict -else: - def _check_fails(cls, other): - try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', - 'functools', - 'typing']: - # Typed dicts are only for static structural subtyping. - raise TypeError('TypedDict does not support instance and class checks') - except (AttributeError, ValueError): - pass - return False - - def _dict_new(*args, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - return dict(*args, **kwargs) - - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' - - def _typeddict_new(*args, total=True, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') - import warnings - warnings.warn("Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - raise TypeError("TypedDict.__new__() missing 1 required positional " - "argument: '_typename'") - if args: - try: - fields, = args # allow the "_fields" keyword be passed - except ValueError: - raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - 'positional arguments but {} ' - 'were given'.format(len(args) + 2)) - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') - import warnings - warnings.warn("Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - fields = None - - if fields is None: - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") - - ns = {'__annotations__': dict(fields)} - try: - # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return _TypedDictMeta(typename, (), ns, total=total) - - _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' - ' /, *, total=True, **kwargs)') - - class _TypedDictMeta(type): - def __init__(cls, name, bases, ns, total=True): - # In Python 3.4 and 3.5 the __init__ method also needs to support the keyword arguments. 
- # See https://www.python.org/dev/peps/pep-0487/#implementation-details - super(_TypedDictMeta, cls).__init__(name, bases, ns) - - def __new__(cls, name, bases, ns, total=True): - # Create new typed dict class object. - # This method is called directly when TypedDict is subclassed, - # or via _typeddict_new when TypedDict is instantiated. This way - # TypedDict supports all three syntaxes described in its docstring. - # Subclasses and instances of TypedDict return actual dictionaries - # via _dict_new. - ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns) - - annotations = {} - own_annotations = ns.get('__annotations__', {}) - own_annotation_keys = set(own_annotations.keys()) - msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - own_annotations = { - n: typing._type_check(tp, msg) for n, tp in own_annotations.items() - } - required_keys = set() - optional_keys = set() - - for base in bases: - annotations.update(base.__dict__.get('__annotations__', {})) - required_keys.update(base.__dict__.get('__required_keys__', ())) - optional_keys.update(base.__dict__.get('__optional_keys__', ())) - - annotations.update(own_annotations) - if total: - required_keys.update(own_annotation_keys) - else: - optional_keys.update(own_annotation_keys) - - tp_dict.__annotations__ = annotations - tp_dict.__required_keys__ = frozenset(required_keys) - tp_dict.__optional_keys__ = frozenset(optional_keys) - if not hasattr(tp_dict, '__total__'): - tp_dict.__total__ = total - return tp_dict - - __instancecheck__ = __subclasscheck__ = _check_fails - - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) - TypedDict.__module__ = __name__ - TypedDict.__doc__ = \ - """A simple typed name space. At runtime it is equivalent to a plain dict. - - TypedDict creates a dictionary type that expects all of its - instances to have a certain set of keys, with each key - associated with a value of a consistent type. This expectation - is not checked at runtime but is only enforced by type checkers. - Usage:: - - class Point2D(TypedDict): - x: int - y: int - label: str - - a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK - b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check - - assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') - - The type info can be accessed via the Point2D.__annotations__ dict, and - the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports two additional equivalent forms:: - - Point2D = TypedDict('Point2D', x=int, y=int, label=str) - Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - - The class syntax is only supported in Python 3.6+, while two other - syntax forms work for Python 2.7 and 3.2+ - """ - - -# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints) -if hasattr(typing, 'Annotated'): - Annotated = typing.Annotated - get_type_hints = typing.get_type_hints - # Not exported and not a public API, but needed for get_origin() and get_args() - # to work. - _AnnotatedAlias = typing._AnnotatedAlias -elif PEP_560: - class _AnnotatedAlias(typing._GenericAlias, _root=True): - """Runtime representation of an annotated type. - - At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' - with extra annotations. The alias behaves like a normal typing alias, - instantiating is the same as instantiating the underlying type, binding - it to types is also the same. 
- """ - def __init__(self, origin, metadata): - if isinstance(origin, _AnnotatedAlias): - metadata = origin.__metadata__ + metadata - origin = origin.__origin__ - super().__init__(origin, origin) - self.__metadata__ = metadata - - def copy_with(self, params): - assert len(params) == 1 - new_type = params[0] - return _AnnotatedAlias(new_type, self.__metadata__) - - def __repr__(self): - return "typing_extensions.Annotated[{}, {}]".format( - typing._type_repr(self.__origin__), - ", ".join(repr(a) for a in self.__metadata__) - ) - - def __reduce__(self): - return operator.getitem, ( - Annotated, (self.__origin__,) + self.__metadata__ - ) - - def __eq__(self, other): - if not isinstance(other, _AnnotatedAlias): - return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ - - def __hash__(self): - return hash((self.__origin__, self.__metadata__)) - - class Annotated: - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type (and will be in - the __origin__ field), the remaining arguments are kept as a tuple in - the __extra__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - - __slots__ = () - - def __new__(cls, *args, **kwargs): - raise TypeError("Type Annotated cannot be instantiated.") - - @_tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be used " - "with at least two arguments (a type and an " - "annotation).") - msg = "Annotated[t, ...]: t must be a type." - origin = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return _AnnotatedAlias(origin, metadata) - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - "Cannot subclass {}.Annotated".format(cls.__module__) - ) - - def _strip_annotations(t): - """Strips the annotations from a given type. - """ - if isinstance(t, _AnnotatedAlias): - return _strip_annotations(t.__origin__) - if isinstance(t, typing._GenericAlias): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - res = t.copy_with(stripped_args) - res._special = t._special - return res - return t - - def get_type_hints(obj, globalns=None, localns=None, include_extras=False): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. 
For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) - if include_extras: - return hint - return {k: _strip_annotations(t) for k, t in hint.items()} - -elif HAVE_ANNOTATED: - - def _is_dunder(name): - """Returns True if name is a __dunder_variable_name__.""" - return len(name) > 4 and name.startswith('__') and name.endswith('__') - - # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality - # checks, argument expansion etc. are done on the _subs_tre. As a result we - # can't provide a get_type_hints function that strips out annotations. - - class AnnotatedMeta(typing.GenericMeta): - """Metaclass for Annotated""" - - def __new__(cls, name, bases, namespace, **kwargs): - if any(b is not object for b in bases): - raise TypeError("Cannot subclass " + str(Annotated)) - return super().__new__(cls, name, bases, namespace, **kwargs) - - @property - def __metadata__(self): - return self._subs_tree()[2] - - def _tree_repr(self, tree): - cls, origin, metadata = tree - if not isinstance(origin, tuple): - tp_repr = typing._type_repr(origin) - else: - tp_repr = origin[0]._tree_repr(origin) - metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs) - - def _subs_tree(self, tvars=None, args=None): # noqa - if self is Annotated: - return Annotated - res = super()._subs_tree(tvars=tvars, args=args) - # Flatten nested Annotated - if isinstance(res[1], tuple) and res[1][0] is Annotated: - sub_tp = res[1][1] - sub_annot = res[1][2] - return (Annotated, sub_tp, sub_annot + res[2]) - return res - - def _get_cons(self): - """Return the class used to create instance of this type.""" - if self.__origin__ is None: - raise TypeError("Cannot get the underlying type of a " - "non-specialized Annotated type.") - tree = self._subs_tree() - while isinstance(tree, tuple) and tree[0] is Annotated: - tree = tree[1] - if isinstance(tree, tuple): - return tree[0] - else: - return tree - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if self.__origin__ is not None: # specializing an instantiated type - return super().__getitem__(params) - elif not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be instantiated " - "with at least two arguments (a type and an " - "annotation).") - else: - msg = "Annotated[t, ...]: t must be a type." - tp = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return self.__class__( - self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars((tp,)), - # Metadata is a tuple so it won't be touched by _replace_args et al. 
- args=(tp, metadata), - origin=self, - ) - - def __call__(self, *args, **kwargs): - cons = self._get_cons() - result = cons(*args, **kwargs) - try: - result.__orig_class__ = self - except AttributeError: - pass - return result - - def __getattr__(self, attr): - # For simplicity we just don't relay all dunder names - if self.__origin__ is not None and not _is_dunder(attr): - return getattr(self._get_cons(), attr) - raise AttributeError(attr) - - def __setattr__(self, attr, value): - if _is_dunder(attr) or attr.startswith('_abc_'): - super().__setattr__(attr, value) - elif self.__origin__ is None: - raise AttributeError(attr) - else: - setattr(self._get_cons(), attr, value) - - def __instancecheck__(self, obj): - raise TypeError("Annotated cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Annotated cannot be used with issubclass().") - - class Annotated(metaclass=AnnotatedMeta): - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type, the remaining - arguments are kept as a tuple in the __metadata__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - -# Python 3.8 has get_origin() and get_args() but those implementations aren't -# Annotated-aware, so we can't use those, only Python 3.9 versions will do. -# Similarly, Python 3.9's implementation doesn't support ParamSpecArgs and -# ParamSpecKwargs. -if sys.version_info[:2] >= (3, 10): - get_origin = typing.get_origin - get_args = typing.get_args -elif PEP_560: - from typing import _GenericAlias - try: - # 3.9+ - from typing import _BaseGenericAlias - except ImportError: - _BaseGenericAlias = _GenericAlias - try: - # 3.9+ - from typing import GenericAlias - except ImportError: - GenericAlias = _GenericAlias - - def get_origin(tp): - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar - and Annotated. Return None for unsupported types. Examples:: - - get_origin(Literal[42]) is Literal - get_origin(int) is None - get_origin(ClassVar[int]) is ClassVar - get_origin(Generic) is Generic - get_origin(Generic[T]) is Generic - get_origin(Union[T, int]) is Union - get_origin(List[Tuple[T, T]][int]) == list - get_origin(P.args) is P - """ - if isinstance(tp, _AnnotatedAlias): - return Annotated - if isinstance(tp, (_GenericAlias, GenericAlias, _BaseGenericAlias, - ParamSpecArgs, ParamSpecKwargs)): - return tp.__origin__ - if tp is Generic: - return Generic - return None - - def get_args(tp): - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. 
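A brief illustrative sketch of the Annotated-aware get_origin()/get_args() helpers defined here, which is the behaviour the pre-3.10 standard-library versions lack::

    from typing import Dict
    from typing_extensions import Annotated, get_args, get_origin

    get_origin(Dict[str, int])          # <class 'dict'>
    get_args(Dict[str, int])            # (str, int)
    get_origin(Annotated[int, 'meta'])  # Annotated (the special form itself)
    get_args(Annotated[int, 'meta'])    # (int, 'meta')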
- Examples:: - get_args(Dict[str, int]) == (str, int) - get_args(int) == () - get_args(Union[int, Union[T, int], str][int]) == (int, str) - get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - get_args(Callable[[], T][int]) == ([], int) - """ - if isinstance(tp, _AnnotatedAlias): - return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (_GenericAlias, GenericAlias)): - if getattr(tp, "_special", False): - return () - res = tp.__args__ - if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: - res = (list(res[:-1]), res[-1]) - return res - return () - - -if hasattr(typing, 'TypeAlias'): - TypeAlias = typing.TypeAlias -elif sys.version_info[:2] >= (3, 9): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeAliasForm - def TypeAlias(self, parameters): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - raise TypeError("{} is not subscriptable".format(self)) - -elif sys.version_info[:2] >= (3, 7): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - TypeAlias = _TypeAliasForm('TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example - above.""") - -elif hasattr(typing, '_FinalTypingBase'): - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - TypeAlias = _TypeAliasBase(_root=True) -else: - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - class TypeAlias(metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. 
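A minimal sketch of the TypeAlias marker described above; ``Handler`` is a hypothetical alias name::

    from typing import Callable, Dict
    from typing_extensions import TypeAlias

    # Tells type checkers this assignment defines a type alias, not an ordinary constant.
    Handler: TypeAlias = Callable[[Dict[str, str]], None]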
- """ - __slots__ = () - - -# Python 3.10+ has PEP 612 -if hasattr(typing, 'ParamSpecArgs'): - ParamSpecArgs = typing.ParamSpecArgs - ParamSpecKwargs = typing.ParamSpecKwargs -else: - class _Immutable: - """Mixin to indicate that object should not be copied.""" - __slots__ = () - - def __copy__(self): - return self - - def __deepcopy__(self, memo): - return self - - class ParamSpecArgs(_Immutable): - """The args for a ParamSpec object. - - Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. - - ParamSpecArgs objects have a reference back to their ParamSpec: - - P.args.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return "{}.args".format(self.__origin__.__name__) - - class ParamSpecKwargs(_Immutable): - """The kwargs for a ParamSpec object. - - Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. - - ParamSpecKwargs objects have a reference back to their ParamSpec: - - P.kwargs.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return "{}.kwargs".format(self.__origin__.__name__) - -if hasattr(typing, 'ParamSpec'): - ParamSpec = typing.ParamSpec -else: - - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class ParamSpec(list): - """Parameter specification variable. - - Usage:: - - P = ParamSpec('P') - - Parameter specification variables exist primarily for the benefit of static - type checkers. They are used to forward the parameter types of one - callable to another callable, a pattern commonly found in higher order - functions and decorators. They are only valid when used in ``Concatenate``, - or s the first argument to ``Callable``. In Python 3.10 and higher, - they are also supported in user-defined Generics at runtime. - See class Generic for more information on generic types. An - example for annotating a decorator:: - - T = TypeVar('T') - P = ParamSpec('P') - - def add_logging(f: Callable[P, T]) -> Callable[P, T]: - '''A type-safe decorator to add logging to a function.''' - def inner(*args: P.args, **kwargs: P.kwargs) -> T: - logging.info(f'{f.__name__} was called') - return f(*args, **kwargs) - return inner - - @add_logging - def add_two(x: float, y: float) -> float: - '''Add two numbers together.''' - return x + y - - Parameter specification variables defined with covariant=True or - contravariant=True can be used to declare covariant or contravariant - generic types. These keyword arguments are valid, but their actual semantics - are yet to be decided. See PEP 612 for details. - - Parameter specification variables can be introspected. e.g.: - - P.__name__ == 'T' - P.__bound__ == None - P.__covariant__ == False - P.__contravariant__ == False - - Note that only parameter specification variables defined in global scope can - be pickled. 
- """ - - @property - def args(self): - return ParamSpecArgs(self) - - @property - def kwargs(self): - return ParamSpecKwargs(self) - - def __init__(self, name, *, bound=None, covariant=False, contravariant=False): - super().__init__([self]) - self.__name__ = name - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if bound: - self.__bound__ = typing._type_check(bound, 'Bound must be a type.') - else: - self.__bound__ = None - - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __hash__(self): - return object.__hash__(self) - - def __eq__(self, other): - return self is other - - def __reduce__(self): - return self.__name__ - - # Hack to get typing._type_check to pass. - def __call__(self, *args, **kwargs): - pass - - # Note: Can't fake ParamSpec as a TypeVar to get it to work - # with Generics. ParamSpec isn't an instance of TypeVar in 3.10. - # So encouraging code like isinstance(ParamSpec('P'), TypeVar)) - # will lead to breakage in 3.10. - # This also means no accurate __parameters__ for GenericAliases. - -# Inherits from list as a workaround for Callable checks in Python < 3.9.2. -class _ConcatenateGenericAlias(list): - def __init__(self, origin, args): - super().__init__(args) - self.__origin__ = origin - self.__args__ = args - - def __repr__(self): - _type_repr = typing._type_repr - return '{origin}[{args}]' \ - .format(origin=_type_repr(self.__origin__), - args=', '.join(_type_repr(arg) for arg in self.__args__)) - - def __hash__(self): - return hash((self.__origin__, self.__args__)) - -@_tp_cache -def _concatenate_getitem(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Concatenate of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if not isinstance(parameters[-1], ParamSpec): - raise TypeError("The last parameter to Concatenate should be a " - "ParamSpec variable.") - msg = "Concatenate[arg, ...]: each arg must be a type." - parameters = tuple(typing._type_check(p, msg) for p in parameters) - return _ConcatenateGenericAlias(self, parameters) - - -if hasattr(typing, 'Concatenate'): - Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa -elif sys.version_info[:2] >= (3, 9): - @_TypeAliasForm - def Concatenate(self, parameters): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - return _concatenate_getitem(self, parameters) - -elif sys.version_info[:2] >= (3, 7): - class _ConcatenateForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateForm('Concatenate', - doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. 
- """) - -elif hasattr(typing, '_FinalTypingBase'): - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __repr__(self): - return 'typing_extensions.Concatenate' - - class _ConcatenateAliasBase(typing._FinalTypingBase, - metaclass=_ConcatenateAliasMeta, - _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Concatenate cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Concatenate cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.Concatenate' - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateAliasBase(_root=True) -# For 3.5.0 - 3.5.2 -else: - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - class Concatenate(metaclass=_ConcatenateAliasMeta, _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - __slots__ = () - -if hasattr(typing, 'TypeGuard'): - TypeGuard = typing.TypeGuard -elif sys.version_info[:2] >= (3, 9): - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeGuardForm - def TypeGuard(self, parameters): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... 
- - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - item = typing._type_check(parameters, '{} accepts only single type.'.format(self)) - return _GenericAlias(self, (item,)) - -elif sys.version_info[:2] >= (3, 7): - class _TypeGuardForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, - '{} accepts only a single type'.format(self._name)) - return _GenericAlias(self, (item,)) - - TypeGuard = _TypeGuardForm( - 'TypeGuard', - doc="""Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """) -elif hasattr(typing, '_FinalTypingBase'): - class _TypeGuard(typing._FinalTypingBase, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". 
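A minimal sketch of the ``List[object]`` to ``List[str]`` narrowing use case mentioned in this docstring; the function names are hypothetical::

    from typing import List
    from typing_extensions import TypeGuard

    def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
        # A True result narrows val to List[str] for static type checkers.
        return all(isinstance(x, str) for x in val)

    def upper_all(items: List[object]) -> List[str]:
        if is_str_list(items):
            return [s.upper() for s in items]   # items is treated as List[str] here
        raise TypeError('expected a list of strings')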
- - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only a single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _TypeGuard): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - TypeGuard = _TypeGuard(_root=True) -else: - class _TypeGuardMeta(typing.TypingMeta): - """Metaclass for TypeGuard""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("TypeGuard cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeGuard cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, 
other): - if not hasattr(other, "__type__"): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class TypeGuard(typing.Final, metaclass=_TypeGuardMeta, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - __type__ = None diff --git a/third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info/LICENSE rename to third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/LICENSE diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA similarity index 63% rename from third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/METADATA rename to third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA index 79ff30b0339d..9e71c5a8c4f5 100644 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/METADATA +++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: zipp -Version: 3.5.0 +Version: 3.4.0 Summary: Backport of pathlib-compatible object wrapper for zip files Home-page: https://github.com/jaraco/zipp Author: Jason R. 
Coombs @@ -13,21 +13,20 @@ Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Requires-Python: >=3.6 -License-File: LICENSE Provides-Extra: docs Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs' Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' Provides-Extra: testing -Requires-Dist: pytest (>=4.6) ; extra == 'testing' -Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing' Requires-Dist: pytest-flake8 ; extra == 'testing' Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: jaraco.test (>=3.2.0) ; extra == 'testing' Requires-Dist: jaraco.itertools ; extra == 'testing' Requires-Dist: func-timeout ; extra == 'testing' -Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' -Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy" and python_version < "3.10") and extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' .. image:: https://img.shields.io/pypi/v/zipp.svg :target: `PyPI link`_ @@ -37,22 +36,19 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy" and pytho .. _PyPI link: https://pypi.org/project/zipp -.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg - :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22 - :alt: tests +.. image:: https://github.com/jaraco/zipp/workflows/Automated%20Tests/badge.svg + :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22Automated+Tests%22 + :alt: Automated Tests .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black :alt: Code style: Black -.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest -.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest - -.. image:: https://img.shields.io/badge/skeleton-2021-informational - :target: https://blog.jaraco.com/skeleton +.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest +.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest -A pathlib-compatible Zipfile object wrapper. Official backport of the standard library +A pathlib-compatible Zipfile object wrapper. A backport of the `Path object `_. 
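As an aside on the zipp metadata above, a minimal sketch of the pathlib-style API it backports; the archive contents here are made up::

    import io
    import zipfile
    import zipp

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as zf:
        zf.writestr('pkg/data.txt', 'hello')

    root = zipp.Path(zipfile.ZipFile(buf))
    [p.name for p in root.iterdir()]          # ['pkg']
    (root / 'pkg' / 'data.txt').read_text()   # 'hello'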
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD new file mode 100644 index 000000000000..3c441ec9bd6e --- /dev/null +++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD @@ -0,0 +1,6 @@ +zipp.py,sha256=wMSoYxAIPgYnqJAW0JcAl5sWaIcFc5xk3dNjf6ElGgU,8089 +zipp-3.4.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +zipp-3.4.0.dist-info/METADATA,sha256=noSfks-ReGCmOSTxll7TELBJy0P_yAvVLa0FCFyhMeM,2134 +zipp-3.4.0.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92 +zipp-3.4.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5 +zipp-3.4.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL similarity index 65% rename from third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/WHEEL rename to third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL index 385faab0525c..83ff02e961fc 100644 --- a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info/WHEEL +++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) +Generator: bdist_wheel (0.35.1) Root-Is-Purelib: true Tag: py3-none-any diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt similarity index 50% rename from third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/top_level.txt rename to third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt index e8bbac6f50af..e82f676f82a3 100644 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/top_level.txt +++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt @@ -1,2 +1 @@ -Misc zipp diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp.py b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py similarity index 96% rename from third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp.py rename to third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py index 69cdaad4a9c5..25ef06e929da 100644 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp.py +++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py @@ -259,18 +259,6 @@ class Path: def name(self): return pathlib.Path(self.at).name or self.filename.name - @property - def suffix(self): - return pathlib.Path(self.at).suffix or self.filename.suffix - - @property - def suffixes(self): - return pathlib.Path(self.at).suffixes or self.filename.suffixes - - @property - def stem(self): - return pathlib.Path(self.at).stem or self.filename.stem - @property def filename(self): return 
pathlib.Path(self.root.filename).joinpath(self.at) diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst b/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst deleted file mode 100644 index ee03e933f35d..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`zipfile.Path` now supports :attr:`zipfile.Path.stem`, -:attr:`zipfile.Path.suffixes`, and :attr:`zipfile.Path.suffix` attributes. diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/LICENSE deleted file mode 100644 index 353924be0e59..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Jason R. Coombs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
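Since the hunk above drops the ``.suffix``/``.suffixes``/``.stem`` properties (they only exist in zipp 3.5.0), a minimal sketch of the equivalent lookup against the 3.4.0 version vendored here; the archive contents are made up::

    import io
    import pathlib
    import zipfile
    import zipp

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as zf:
        zf.writestr('pkg/data.txt', 'hello')

    p = zipp.Path(zipfile.ZipFile(buf)) / 'pkg' / 'data.txt'
    # zipp 3.4.0 has no Path.suffix/.suffixes/.stem; the removed 3.5.0 properties
    # simply delegated to pathlib, so the same information is still reachable:
    pathlib.Path(p.at).suffix   # '.txt'
    pathlib.Path(p.at).stem     # 'data'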
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/RECORD deleted file mode 100644 index 51869dec1c4d..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/RECORD +++ /dev/null @@ -1,7 +0,0 @@ -zipp.py,sha256=U5sG89qrj3m5hM9ZikRv07W7zO8kEpn5iSK-NbOahT8,8404 -Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst,sha256=-G0bwl7-tvBZIg0EF8_cjm55hgyh4Pt1OA5C8mVcmcM,137 -zipp-3.5.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -zipp-3.5.0.dist-info/METADATA,sha256=GVKdMrKTQc149Stxy_Vhace_kQRQCD6GKZB6wn4-t20,2319 -zipp-3.5.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 -zipp-3.5.0.dist-info/top_level.txt,sha256=kw8TUG2PPApygEOBqun0FQ31H5vu8AjxfDxlqwAVzAc,10 -zipp-3.5.0.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/WHEEL deleted file mode 100644 index 385faab0525c..000000000000 --- a/third_party/python/virtualenv/__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/distributions.json b/third_party/python/virtualenv/distributions.json index 7d6f9686657a..7a56846d1cd5 100644 --- a/third_party/python/virtualenv/distributions.json +++ b/third_party/python/virtualenv/distributions.json @@ -1,87 +1,83 @@ { - "3.10": { - "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", - "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info" - } - }, "3.9": { "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info" } }, "3.8": { "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", "filelock": 
"__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info" } }, "3.7": { "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info", - "importlib_metadata": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info", - "zipp": "__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info", - "typing_extensions": "__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info", + "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info", + "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info" } }, "3.6": { "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs-2.2.0.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info", - "importlib_metadata": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata-4.6.2.dist-info", - "zipp": "__virtualenv__/zipp-3.5.0-py3-none-any/zipp-3.5.0.dist-info", - "typing_extensions": "__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions-3.10.0.0.dist-info", - "importlib_resources": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources-5.2.2.dist-info" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info", + "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info", + "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info", + "importlib_resources": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info" } }, "3.5": { "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", 
"filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info", + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info", "importlib_metadata": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info", "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info", "importlib_resources": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info" } }, + "3.4": { + "==any": { + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", + "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info", + "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info", + "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info", + "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info", + "typing": "__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info", + "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info", + "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info" + } + }, "2.7": { "==any": { - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports.entry_points_selectable-1.1.0.dist-info", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib-0.3.2.dist-info", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info", - "platformdirs": "__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs-2.0.2.dist-info", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six-1.16.0.dist-info", - "importlib_metadata": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info", + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info", + "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info", "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info", - "importlib_resources": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info", - "pathlib2": "__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info", - "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir-1.10.0.dist-info", + "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info", + "typing": "__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info", + "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info", + "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info", "contextlib2": "__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info", - "singledispatch": 
"__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch-3.6.2.dist-info", - "typing": "__virtualenv__/typing-3.10.0.0-py2-none-any/typing-3.10.0.0.dist-info", "configparser": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info" }, "!=win32": { - "pathlib2": "__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2-2.3.6.dist-info" + "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info" } } } \ No newline at end of file diff --git a/third_party/python/virtualenv/modules.json b/third_party/python/virtualenv/modules.json index 23f9c5ff96c6..de3c039da733 100644 --- a/third_party/python/virtualenv/modules.json +++ b/third_party/python/virtualenv/modules.json @@ -1,247 +1,158 @@ { - "3.10": { - "==any": { - "backports": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - "distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", - "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py", - "platformdirs.__main__": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py", - "platformdirs.android": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py", - "platformdirs.api": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py", - "platformdirs.macos": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py", - "platformdirs.unix": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py", - "platformdirs.version": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py", - "platformdirs.windows": 
"__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py" - } - }, "3.9": { "==any": { - "backports": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - "distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + 
"distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py", - "platformdirs.__main__": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py", - "platformdirs.android": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py", - "platformdirs.api": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py", - "platformdirs.macos": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py", - "platformdirs.unix": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py", - "platformdirs.version": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py", - "platformdirs.windows": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py" } }, "3.8": { "==any": { - "backports": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - "distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": 
"__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py", - "platformdirs.__main__": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py", - "platformdirs.android": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py", - "platformdirs.api": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py", - "platformdirs.macos": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py", - "platformdirs.unix": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py", - "platformdirs.version": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py", - "platformdirs.windows": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py" } }, "3.7": { "==any": { - "backports": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - 
"distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py", - "platformdirs.__main__": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py", - "platformdirs.android": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py", - "platformdirs.api": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py", - "platformdirs.macos": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py", - "platformdirs.unix": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py", - "platformdirs.version": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py", - "platformdirs.windows": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py", - 
"importlib_metadata": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/__init__.py", - "importlib_metadata._adapters": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_adapters.py", - "importlib_metadata._collections": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_collections.py", - "importlib_metadata._compat": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_compat.py", - "importlib_metadata._functools": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_functools.py", - "importlib_metadata._itertools": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_itertools.py", - "importlib_metadata._meta": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_meta.py", - "importlib_metadata._text": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_text.py", - "zipp": "__virtualenv__/zipp-3.5.0-py3-none-any/zipp.py", - "typing_extensions": "__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions.py" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py", + "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py", + "importlib_metadata._compat": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py", + "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py" } }, "3.6": { "==any": { - "backports": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - "distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": 
"__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__init__.py", - "platformdirs.__main__": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/__main__.py", - "platformdirs.android": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/android.py", - "platformdirs.api": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/api.py", - "platformdirs.macos": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/macos.py", - "platformdirs.unix": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/unix.py", - "platformdirs.version": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/version.py", - "platformdirs.windows": "__virtualenv__/platformdirs-2.2.0-py3-none-any/platformdirs/windows.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py", - "importlib_metadata": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/__init__.py", - "importlib_metadata._adapters": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_adapters.py", - "importlib_metadata._collections": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_collections.py", - "importlib_metadata._compat": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_compat.py", - "importlib_metadata._functools": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_functools.py", - "importlib_metadata._itertools": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_itertools.py", - "importlib_metadata._meta": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_meta.py", - "importlib_metadata._text": "__virtualenv__/importlib_metadata-4.6.2-py3-none-any/importlib_metadata/_text.py", - "zipp": 
"__virtualenv__/zipp-3.5.0-py3-none-any/zipp.py", - "typing_extensions": "__virtualenv__/typing_extensions-3.10.0.0-py3-none-any/typing_extensions.py", - "importlib_resources": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/__init__.py", - "importlib_resources._adapters": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_adapters.py", - "importlib_resources._common": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_common.py", - "importlib_resources._compat": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_compat.py", - "importlib_resources._itertools": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_itertools.py", - "importlib_resources._legacy": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/_legacy.py", - "importlib_resources.abc": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/abc.py", - "importlib_resources.readers": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/readers.py", - "importlib_resources.simple": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/simple.py", - "importlib_resources.tests": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/__init__.py", - "importlib_resources.tests._compat": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/_compat.py", - "importlib_resources.tests.test_compatibilty_files": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_compatibilty_files.py", - "importlib_resources.tests.test_contents": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_contents.py", - "importlib_resources.tests.test_files": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_files.py", - "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_open.py", - "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_path.py", - "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_read.py", - "importlib_resources.tests.test_reader": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_reader.py", - "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/test_resource.py", - "importlib_resources.tests.update-zips": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/update-zips.py", - "importlib_resources.tests.util": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/util.py", - "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/data01/__init__.py", - "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py", - "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/data02/__init__.py", - "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/data02/one/__init__.py", - "importlib_resources.tests.data02.two": 
"__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/data02/two/__init__.py", - "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/zipdata01/__init__.py", - "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-5.2.2-py3-none-any/importlib_resources/tests/zipdata02/__init__.py" + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py", + "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py", + "importlib_metadata._compat": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py", + "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py", + "importlib_resources": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py", + "importlib_resources._common": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py", + "importlib_resources._compat": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py", + "importlib_resources._py2": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py", + "importlib_resources._py3": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py", + "importlib_resources.abc": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py", + "importlib_resources.readers": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py", + "importlib_resources.trees": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py", + "importlib_resources.tests": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/__init__.py", + "importlib_resources.tests._compat": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/_compat.py", + "importlib_resources.tests.py27compat": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/py27compat.py", + "importlib_resources.tests.test_files": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_files.py", + "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_open.py", + "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_path.py", + "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_read.py", + "importlib_resources.tests.test_reader": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_reader.py", + "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_resource.py", + "importlib_resources.tests.util": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/util.py", + "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data01/__init__.py", + "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py", + "importlib_resources.tests.data02": 
"__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data02/__init__.py", + "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py", + "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py", + "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py", + "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py" } }, "3.5": { "==any": { - "backports": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - "distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + 
"distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py", + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py", "importlib_metadata": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py", "importlib_metadata._compat": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py", "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py", @@ -272,71 +183,132 @@ "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py" } }, + "3.4": { + "==any": { + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", + "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py", + 
"importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py", + "importlib_metadata._compat": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py", + "importlib_metadata.docs": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/__init__.py", + "importlib_metadata.docs.conf": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/conf.py", + "importlib_metadata.tests": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/__init__.py", + "importlib_metadata.tests.fixtures": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/fixtures.py", + "importlib_metadata.tests.test_api": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_api.py", + "importlib_metadata.tests.test_integration": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_integration.py", + "importlib_metadata.tests.test_main": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_main.py", + "importlib_metadata.tests.test_zip": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_zip.py", + "importlib_metadata.tests.data": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/data/__init__.py", + "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py", + "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py", + "importlib_resources._compat": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py", + "importlib_resources._py2": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py", + "importlib_resources._py3": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py", + "importlib_resources.abc": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py", + "importlib_resources.docs.conf": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/docs/conf.py", + "importlib_resources.tests": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/__init__.py", + "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_open.py", + "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_path.py", + "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_read.py", + "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_resource.py", + "importlib_resources.tests.util": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/util.py", + "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/__init__.py", + "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py", + "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/__init__.py", + 
"importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py", + "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py", + "importlib_resources.tests.data03": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data03/__init__.py", + "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py", + "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py", + "typing": "__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py", + "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py", + "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py" + } + }, "2.7": { "==any": { - "backports": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py", - "backports.entry_points_selectable": "__virtualenv__/backports.entry_points_selectable-1.1.0-py2.py3-none-any/backports/entry_points_selectable.py", - "distlib": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/__init__.py", - "distlib.compat": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/compat.py", - "distlib.database": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/database.py", - "distlib.index": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/index.py", - "distlib.locators": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/locators.py", - "distlib.manifest": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/manifest.py", - "distlib.markers": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/markers.py", - "distlib.metadata": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/metadata.py", - "distlib.resources": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/resources.py", - "distlib.scripts": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/scripts.py", - "distlib.util": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/util.py", - "distlib.version": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/version.py", - "distlib.wheel": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/wheel.py", - "distlib._backport": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/__init__.py", - "distlib._backport.misc": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/misc.py", - "distlib._backport.shutil": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/shutil.py", - "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/sysconfig.py", - "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.2-py2.py3-none-any/distlib/_backport/tarfile.py", + "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py", + "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py", + "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py", + "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py", + "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py", + "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py", + "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py", + 
"distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py", + "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py", + "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py", + "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py", + "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py", + "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py", + "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py", + "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py", + "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py", + "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py", + "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py", + "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py", "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py", - "platformdirs": "__virtualenv__/platformdirs-2.0.2-py2.py3-none-any/platformdirs.py", - "six": "__virtualenv__/six-1.16.0-py2.py3-none-any/six.py", - "importlib_metadata": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py", - "importlib_metadata._compat": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py", + "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py", + "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py", + "importlib_metadata._compat": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py", + "importlib_metadata.docs": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/__init__.py", + "importlib_metadata.docs.conf": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/conf.py", + "importlib_metadata.tests": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/__init__.py", + "importlib_metadata.tests.fixtures": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/fixtures.py", + "importlib_metadata.tests.test_api": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_api.py", + "importlib_metadata.tests.test_integration": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_integration.py", + "importlib_metadata.tests.test_main": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_main.py", + "importlib_metadata.tests.test_zip": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_zip.py", + "importlib_metadata.tests.data": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/data/__init__.py", "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py", - "importlib_resources": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/__init__.py", - "importlib_resources._common": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_common.py", - "importlib_resources._compat": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_compat.py", - 
"importlib_resources._py2": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_py2.py", - "importlib_resources._py3": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/_py3.py", - "importlib_resources.abc": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/abc.py", - "importlib_resources.readers": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/readers.py", - "importlib_resources.trees": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/trees.py", - "importlib_resources.tests": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/__init__.py", - "importlib_resources.tests._compat": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/_compat.py", - "importlib_resources.tests.py27compat": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/py27compat.py", - "importlib_resources.tests.test_files": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/test_files.py", - "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/test_open.py", - "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/test_path.py", - "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/test_read.py", - "importlib_resources.tests.test_reader": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/test_reader.py", - "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/test_resource.py", - "importlib_resources.tests.util": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/util.py", - "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/data01/__init__.py", - "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py", - "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/data02/__init__.py", - "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py", - "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py", - "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py", - "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py", - "pathlib2": "__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2/__init__.py", - "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-linux_x86_64/scandir.py", + "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py", + "importlib_resources._compat": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py", + "importlib_resources._py2": 
"__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py", + "importlib_resources._py3": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py", + "importlib_resources.abc": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py", + "importlib_resources.docs.conf": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/docs/conf.py", + "importlib_resources.tests": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/__init__.py", + "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_open.py", + "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_path.py", + "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_read.py", + "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_resource.py", + "importlib_resources.tests.util": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/util.py", + "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/__init__.py", + "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py", + "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/__init__.py", + "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py", + "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py", + "importlib_resources.tests.data03": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data03/__init__.py", + "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py", + "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py", + "typing": "__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py", + "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py", + "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py", "contextlib2": "__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2.py", - "singledispatch": "__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/__init__.py", - "singledispatch.helpers": "__virtualenv__/singledispatch-3.6.2-py2.py3-none-any/singledispatch/helpers.py", - "typing": "__virtualenv__/typing-3.10.0.0-py2-none-any/typing.py", "configparser": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser.py", + "backports": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py", "backports.configparser": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/__init__.py", "backports.configparser.helpers": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/helpers.py" }, "!=win32": { - "pathlib2": 
"__virtualenv__/pathlib2-2.3.6-py2.py3-none-any/pathlib2/__init__.py" + "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py" } } } \ No newline at end of file diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/LICENSE b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/LICENSE similarity index 100% rename from third_party/python/virtualenv/virtualenv-20.7.0.dist-info/LICENSE rename to third_party/python/virtualenv/virtualenv-20.2.2.dist-info/LICENSE diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/METADATA b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA similarity index 90% rename from third_party/python/virtualenv/virtualenv-20.7.0.dist-info/METADATA rename to third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA index f5c3539ce5af..9c0e3ad119b7 100644 --- a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/METADATA +++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: virtualenv -Version: 20.7.0 +Version: 20.2.2 Summary: Virtual Python Environment builder Home-page: https://virtualenv.pypa.io/ Author: Bernat Gabor @@ -21,24 +21,22 @@ Classifier: Operating System :: POSIX Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Software Development :: Testing Classifier: Topic :: Utilities -Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7 +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7 Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: backports.entry-points-selectable (>=1.0.4) +Requires-Dist: appdirs (<2,>=1.4.3) Requires-Dist: distlib (<1,>=0.3.1) Requires-Dist: filelock (<4,>=3.0.0) -Requires-Dist: platformdirs (<3,>=2) Requires-Dist: six (<2,>=1.9.0) Requires-Dist: pathlib2 (<3,>=2.3.3) ; python_version < "3.4" and sys_platform != "win32" Requires-Dist: importlib-resources (>=1.0) ; python_version < "3.7" @@ -59,7 +57,9 @@ Requires-Dist: pytest-freezegun (>=0.4.1) ; extra == 'testing' Requires-Dist: pytest-mock (>=2) ; extra == 'testing' Requires-Dist: pytest-randomly (>=1) ; extra == 'testing' Requires-Dist: pytest-timeout (>=1) ; extra == 'testing' +Requires-Dist: pytest-xdist (>=1.31.0) ; extra == 'testing' Requires-Dist: packaging (>=20.0) ; (python_version > "3.4") and extra == 'testing' +Requires-Dist: xonsh (>=0.9.16) ; (python_version > "3.4" and python_version != "3.9") and extra == 'testing' # virtualenv @@ -67,7 +67,7 @@ Requires-Dist: packaging (>=20.0) ; (python_version > "3.4") and extra == 'testi [![PyPI - Implementation](https://img.shields.io/pypi/implementation/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv) 
[![Documentation](https://readthedocs.org/projects/virtualenv/badge/?version=latest&style=flat-square)](http://virtualenv.pypa.io) -[![Discord](https://img.shields.io/discord/803025117553754132)](https://discord.gg/pypa) +[![Gitter Chat](https://img.shields.io/gitter/room/pypa/virtualenv?color=FF004F&style=flat-square)](https://gitter.im/pypa/virtualenv) [![PyPI - Downloads](https://img.shields.io/pypi/dm/virtualenv?style=flat-square)](https://pypistats.org/packages/virtualenv) [![PyPI - License](https://img.shields.io/pypi/l/virtualenv?style=flat-square)](https://opensource.org/licenses/MIT) [![Build Status](https://github.com/pypa/virtualenv/workflows/check/badge.svg?branch=main&event=push)](https://github.com/pypa/virtualenv/actions?query=workflow%3Acheck) diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/RECORD b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD similarity index 61% rename from third_party/python/virtualenv/virtualenv-20.7.0.dist-info/RECORD rename to third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD index c1ff93bb7979..b8808400e635 100644 --- a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/RECORD +++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD @@ -1,13 +1,13 @@ virtualenv/__init__.py,sha256=SMvpjz4VJ3vJ_yfDDPzJAdi2GJOYd_UBXXuvImO07gk,205 -virtualenv/__main__.py,sha256=ypkUDuc5Q8XVyFpW-wEWgzx0LuAdK1yF2FsCUYlgPEk,2900 +virtualenv/__main__.py,sha256=QMwDqrR4QbhEivl8yoRmAr6G1BY92gr4n1ConcDIxc4,2770 virtualenv/info.py,sha256=-2pI_kyC9fNj5OR8AQWkKjlpOk4_96Lmbco3atYYBdY,1921 virtualenv/report.py,sha256=M2OHHCWdOHZsn74tj1MYYKmaI3QRJF8VA1FZIdkQTMQ,1594 -virtualenv/version.py,sha256=1tb1pU7k-rSQtA7ozrxCgTMrflwymMrJyQ8MVfD1IH4,65 -virtualenv/activation/__init__.py,sha256=3Hl_b1qPTDlVrBCjD_kuEsUVPZfjxhApBIVeY6fk2m4,424 +virtualenv/version.py,sha256=T9L0FIrWWe1IEvi_PNtZQcEIf_WbHAtFeLA1_hwZ07I,65 +virtualenv/activation/__init__.py,sha256=jLIERxJXMnHq2fH49RdWqBoaiASres4CTKMdUJOeos0,480 virtualenv/activation/activator.py,sha256=CXomkRvhzcAeygYlDwQdDjfPyZQG85aBab5GIVQPv2M,1341 virtualenv/activation/via_template.py,sha256=U8LgH-lyTjXIQBUdbd0xOZpXNICpiKhsfpiZwzQg7tU,2372 virtualenv/activation/bash/__init__.py,sha256=7aC1WfvyzgFrIQs13jOuESuAbuiAnTsKkOe0iReRoaE,312 -virtualenv/activation/bash/activate.sh,sha256=JFbq0nOptwP5iZID37HLjPV--M7E1kWGgSkAbo_DcQQ,2176 +virtualenv/activation/bash/activate.sh,sha256=aHia5vyXg2JwymkvRXCp29Aswcg88Mz5UrssXbX9Jjc,2398 virtualenv/activation/batch/__init__.py,sha256=K0gVfwuXV7uoaMDL7moWGCq7uTDzI64giZzQQ8s2qnU,733 virtualenv/activation/batch/activate.bat,sha256=PeQnWWsjvHT-jIWhYI7hbdzkDBZx5UOstnsCmq5PYtw,1031 virtualenv/activation/batch/deactivate.bat,sha256=6OznnO-HC2wnWUN7YAT-bj815zeKMXEPC0keyBYwKUU,510 @@ -22,22 +22,22 @@ virtualenv/activation/python/__init__.py,sha256=Uv53LqOrIT_2dO1FXcUYAnwH1eypG8CJ virtualenv/activation/python/activate_this.py,sha256=Xpz7exdGSjmWk0KfwHLofIpDPUtazNSNGrxT0-5ZG_s,1208 virtualenv/activation/xonsh/__init__.py,sha256=7NUevd5EpHRMZdSyR1KgFTe9QQBO94zZOwFH6MR6zjo,355 virtualenv/activation/xonsh/activate.xsh,sha256=qkKgWfrUjYKrgrmhf45VuBz99EMadtiNU8GMfHZZ7AU,1172 -virtualenv/app_data/__init__.py,sha256=OkJ7iHAF3-j6nDjomzaF3dPKO0Ulrua741V1BM7Igds,1468 +virtualenv/app_data/__init__.py,sha256=nwgqY-Our_SYcDisLfRLmWrTSPytDkjck9-lzg-pOI8,1462 virtualenv/app_data/base.py,sha256=dbS5Maob1-Cqs6EVqTmmbjAGeNYA1iw3pmdgYPWCJak,2129 -virtualenv/app_data/na.py,sha256=N2lR5VV4coM4Lym_E-nW_8Mh3x5-U6Jsq9x4wJn3wBM,1310 
+virtualenv/app_data/na.py,sha256=iMRVpCe4m5Q5WM5bC3ee1wYyfkfHvkcQ-8tgIw4druc,1306 virtualenv/app_data/read_only.py,sha256=MD-4Bl2SZZiGw0g8qZy0YLBGZGCuFYXnAEvWboF1PSc,1006 -virtualenv/app_data/via_disk_folder.py,sha256=6sICLoJn7UvDYl-DhEGJ1lj-pqSQy_h1GxJTxeGPUcg,5598 +virtualenv/app_data/via_disk_folder.py,sha256=CdNXQkenyH178MtSs2Ve6uDUs30-oZpkOz_1guTtTz0,5597 virtualenv/app_data/via_tempdir.py,sha256=Z_-PoU7qeZe-idzi3nqys4FX0rfsRgOQ9_7XwX3hxSA,770 virtualenv/config/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 -virtualenv/config/convert.py,sha256=CWqO9z6j4ifUlAfadtB60xqTZpyAnPT0oDcpNomW7ek,2694 -virtualenv/config/env_var.py,sha256=h1oipwjVqHgelK0YI80q3RvckBibdKtvi2mw-OEMUqI,869 -virtualenv/config/ini.py,sha256=eHN1m4J6L-DvhhQwRjWbXelvYoZoCSSwqCms8gEemi4,2840 +virtualenv/config/convert.py,sha256=WYGjMRKVriZkfTH3z1fI0sDQRZxCxAedqWbOGsaquyg,2693 +virtualenv/config/env_var.py,sha256=48XpOurSLLjMX-kXjvOpZuAoOUP-LvnbotTlmebhhFk,844 +virtualenv/config/ini.py,sha256=neMqXrA6IOkLF_M_MCQWQSeqNm4CT8tj_h3GdbJv1Cg,2783 virtualenv/config/cli/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 -virtualenv/config/cli/parser.py,sha256=HMbn7rDjM3knXhlJ-HP-79Q6mOOwJBgDuBA_GO842C0,4666 +virtualenv/config/cli/parser.py,sha256=y5IqHccLBqFpocpE75X611nVrP8v394VW94a9GAojvE,4524 virtualenv/create/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 -virtualenv/create/creator.py,sha256=rvdFhBeKZA0zCKoVFu83Rw7xa4PCPupwQ_kVLx7wjKQ,8928 +virtualenv/create/creator.py,sha256=4jxxEGXCWd6tInT37QNt-13_yDtcIJdPB6EkoYzDkbM,8889 virtualenv/create/debug.py,sha256=ETOke8w4Ib8fiufAHVeOkH3v0zrztljw3WjGvZyE0Mk,3342 -virtualenv/create/describe.py,sha256=m_vJWNSpblQWQKluBiSp3EZrhFOCOOOZ49SXMXvukUA,3540 +virtualenv/create/describe.py,sha256=bm0V2wpFOjdN_MkzZuJAEBSttmi5YGPVwxtwGYU5zQU,3561 virtualenv/create/pyenv_cfg.py,sha256=VsOGfzUpaVCO3J29zrhIeip4jZ4b7llbe45iOQAIRGg,1717 virtualenv/create/via_global_ref/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/create/via_global_ref/_virtualenv.py,sha256=aEuMB5MrpKhKwuWumv5J7wTpK6w9jUGR1FXPCdCT5fw,5662 @@ -50,74 +50,73 @@ virtualenv/create/via_global_ref/builtin/ref.py,sha256=xCTICJhE-OiopBxl6ymo1P1Nq virtualenv/create/via_global_ref/builtin/via_global_self_do.py,sha256=d569fX7fjq5vHvGGXDjo-1Xi__HhqU2xjDJOuYrmGjw,4552 virtualenv/create/via_global_ref/builtin/cpython/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 virtualenv/create/via_global_ref/builtin/cpython/common.py,sha256=U7EvB9-2DlOQTGrTyPrEcItEbJ1sFBzo1EAOcAIjQ5Q,2392 -virtualenv/create/via_global_ref/builtin/cpython/cpython2.py,sha256=NixgnZITjDP8qBWlnN40lUTeaPNoDUKPSS2ByUBs7Fk,3752 -virtualenv/create/via_global_ref/builtin/cpython/cpython3.py,sha256=jz4mbuhu9BcbcZSwmKBT8eSIvKi6kgkx_V-fuAmtmjc,3312 +virtualenv/create/via_global_ref/builtin/cpython/cpython2.py,sha256=p41H2g6wAqhJzeUU48nH3u05-yWEbwCzhyj4pn8rnm4,3757 +virtualenv/create/via_global_ref/builtin/cpython/cpython3.py,sha256=gguQAhTQb0PH7Xg-G-mgQm5LlhyyW0V0piV3LwI-PeM,3111 virtualenv/create/via_global_ref/builtin/cpython/mac_os.py,sha256=B0Lqgo8geZBSKSpHWUB46lDYRggW4Kg2AZUp3Z7xn9M,12382 virtualenv/create/via_global_ref/builtin/pypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/create/via_global_ref/builtin/pypy/common.py,sha256=-t-TZxCTJwpIh_oRsDyv5IilH19jKqJrZa27zWN_8Ws,1816 -virtualenv/create/via_global_ref/builtin/pypy/pypy2.py,sha256=PNNPhPThtR8yrpehQedFiforqBO9MYeOUrWvLPlJR7w,3547 
-virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=RSn-lQEE_rvNR8jvUXSKsvFAC0xg9Qj7TdTfKzm1GZ4,1924 +virtualenv/create/via_global_ref/builtin/pypy/pypy2.py,sha256=bmMY_KJZ1iD_ifq-X9ZBOlOpJ1aN7839qigBgnWRIdA,3535 +virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=ti6hmOIC4HiTBnEYKytO-d9wH-eLeMoQxQ0kZRhnNrw,1751 virtualenv/create/via_global_ref/builtin/python2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/create/via_global_ref/builtin/python2/python2.py,sha256=jkJwmkeJVTzwzo95eMIptTfdBA-qmyIqZcpt48iOitU,4276 -virtualenv/create/via_global_ref/builtin/python2/site.py,sha256=4kiq0cs57rbrqtFBsl7MyyGB2G2zjWn-1CHuH8C7gPQ,6903 +virtualenv/create/via_global_ref/builtin/python2/site.py,sha256=4uguJDuWPmB25yBmpsMYKLOnIVXkerck0UO8CP8F2c4,6078 virtualenv/discovery/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 -virtualenv/discovery/builtin.py,sha256=LYvYqlyfrzULHeAwm4J4B9y4hLJHczNsivMtvsCoX78,6384 -virtualenv/discovery/cached_py_info.py,sha256=Lo74BQutsLR8z2JFKXdX6HizKrP5-sxsPCCVQZRzB8Q,5312 -virtualenv/discovery/discover.py,sha256=dq-yReN-vZHs9ZCBEOHN97KZErE2K26bPZRfVnNESIU,1241 -virtualenv/discovery/py_info.py,sha256=XAU3WB5yUEzPTC1eRwSBsSPrB4ejVJzZdOMZY5Knyds,22778 +virtualenv/discovery/builtin.py,sha256=rB6XaQwuK1HfvJsrla3BoSQUH9QkJnwKHGWBdbK4QGM,5432 +virtualenv/discovery/cached_py_info.py,sha256=l2lELE8YkwKXCNopImY2VjmpHPTawh1d3qmdsXMtkRs,5043 +virtualenv/discovery/discover.py,sha256=evJYn4APkwjNmdolNeIBSHiOudkvN59c5oVYI2Zsjlg,1209 +virtualenv/discovery/py_info.py,sha256=QtZFq0xav1tEpKI5seEJaEOkc_FXer21Gzgl_Ccqy98,21793 virtualenv/discovery/py_spec.py,sha256=wQhLzCfXoSPsAAO9nm5-I2lNolVDux4W2vPSUfJGjlc,4790 -virtualenv/discovery/windows/__init__.py,sha256=9LjYTjiPygcERmWUugyHcv5jHmZSfHO3H2RvwzIvQvU,1200 +virtualenv/discovery/windows/__init__.py,sha256=TPbnzCtRyw47pRVHTO8ikwljNcczxmSLDdWtwasxvQU,1036 virtualenv/discovery/windows/pep514.py,sha256=YYiaJzo-XuMtO78BMFMAudqkeJiLQkFnUTOuQZ5lJz8,5451 -virtualenv/run/__init__.py,sha256=yRu6KoU7JGlyg5zFTa51aiYKIq36tUphxHaHzmSdjbU,6090 +virtualenv/run/__init__.py,sha256=lVIiIq_LoMHUGYkrTSx0tpFG_aYywy_u6GWUReHRcUA,5777 virtualenv/run/session.py,sha256=S4NZiHzij1vp895mN9s9ZwYobJjjdP37QOHCb1o-Ufo,2563 virtualenv/run/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/run/plugin/activators.py,sha256=kmHShj36eHfbnsiAJzX0U5LYvGhe0WkRYjbuKDz6gVM,2117 -virtualenv/run/plugin/base.py,sha256=_lpRytNmPnkzJzzQhCy2LXWp0tJbDVGdZJkfabx8Zwc,1854 +virtualenv/run/plugin/base.py,sha256=-2185C01PaxOG7gnMbWWyZlo24n_FYo5J5_naeNZw8s,1934 virtualenv/run/plugin/creators.py,sha256=PIxJ85KmrQU7lUO-r8Znxbb4lTEzwHggc9lcDqmt2tc,3494 -virtualenv/run/plugin/discovery.py,sha256=M1S8CZPqUsIpJ88FT8CyGB32lRuflEY5pgK_XcCnK60,1156 +virtualenv/run/plugin/discovery.py,sha256=3ykxRvPA1FJMkqsbr2TV0LBRPT5UCFeJdzEHfuEjxRM,1002 virtualenv/run/plugin/seeders.py,sha256=c1mhzu0HNzKdif6YUV35fuAOS0XHFJz3TtccLW5fWG0,1074 virtualenv/seed/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 -virtualenv/seed/seeder.py,sha256=YAxyIOONsXRJrwzL8GbfSOoZjIMR7fcPREVyE2Tn3iQ,1209 +virtualenv/seed/seeder.py,sha256=DSGE_8Ycj01vj8mkppUBA9h7JG76XsVBMt-5MWlMF6k,1178 virtualenv/seed/embed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 virtualenv/seed/embed/base_embed.py,sha256=46mWtqWj_MjOQEqMJyosL0RWGL6HwrHAL2r1Jxc9DuI,4182 -virtualenv/seed/embed/pip_invoke.py,sha256=6pRRy3_jmGRKc1L7TDbqoXIHy_2_X6t7p_dF27I4cWs,2167 
+virtualenv/seed/embed/pip_invoke.py,sha256=EMVwIeoW15SuorJ8z_-vBxPXwQJLS0ILA0Va9zNoOLI,2127 virtualenv/seed/embed/via_app_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -virtualenv/seed/embed/via_app_data/via_app_data.py,sha256=w2Xtd56g1AfntfxNZtfFlg2t8yqrl5zTzvLo2xMiR94,6032 +virtualenv/seed/embed/via_app_data/via_app_data.py,sha256=NkVhEFv4iuKG0qvEg4AAmucMwmQgNaPLB-Syepzgps0,5994 virtualenv/seed/embed/via_app_data/pip_install/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -virtualenv/seed/embed/via_app_data/pip_install/base.py,sha256=MXdES-qXA4ZVyoi2pEETL2iMznnK4FSsBd7q1_hi72c,7040 +virtualenv/seed/embed/via_app_data/pip_install/base.py,sha256=rnR60JzM7G04cPDo2eH-aR8-iQuFXBgHJ2lQnSf0Gfs,6355 virtualenv/seed/embed/via_app_data/pip_install/copy.py,sha256=gG2NePFHOYh-bsCf6TpsaQ_qrYhdBy67k0RWuwHSAwo,1307 virtualenv/seed/embed/via_app_data/pip_install/symlink.py,sha256=wHCpfKobvjjaZLUSwM3FSCblZfiBFw4IQYsxwlfEEu0,2362 virtualenv/seed/wheels/__init__.py,sha256=1J7el7lNjAwGxM4dmricrbVhSbYxs5sPzv9PTx2A6qA,226 -virtualenv/seed/wheels/acquire.py,sha256=SJuKhqJPFC_Sn4lQttI_pAGZUXmO3LGgVaQzvGy6WGo,4472 -virtualenv/seed/wheels/bundle.py,sha256=3tw1CNANa7gvvPR1H-_P-ofsMhluKh81yvNUK4-hphg,1845 -virtualenv/seed/wheels/periodic_update.py,sha256=D9WwkJsB4W-THEyFKHWxa2QYmIROepcYi-VjUwAbKpg,12874 +virtualenv/seed/wheels/acquire.py,sha256=qchqlIynLi2VP2VtdAfVfZJHbUPcLY2Ui5r7Eh-aZz8,4426 +virtualenv/seed/wheels/bundle.py,sha256=W0uVjClv9IBa50jRvPKm0jMwWnrYTEfDny2Z6bw2W7c,1835 +virtualenv/seed/wheels/periodic_update.py,sha256=HNVEuU2OYdWHW7lVO0h3NkpLkC8bu-5R7igJRXBnGDc,12792 virtualenv/seed/wheels/util.py,sha256=Zdo76KEDqbNmM5u9JTuyu5uzEN_fQ4oj6qHOt0h0o1M,3960 -virtualenv/seed/wheels/embed/__init__.py,sha256=mp6C_1FACEDjioxIl5xGVm8M28XF-Cj_1vF5sjtbIlQ,1787 -virtualenv/seed/wheels/embed/pip-20.3.4-py2.py3-none-any.whl,sha256=IXrlFhoOCMD7hzhYgG40eMl3XK_85RaLUOyIXjWMGZ0,1522101 -virtualenv/seed/wheels/embed/pip-21.1.3-py3-none-any.whl,sha256=eMt2BxH-3AcyRlQ4AchNxTd6_-rYMuEDrQIR-ZMDogQ,1548027 -virtualenv/seed/wheels/embed/pip-21.2.2-py3-none-any.whl,sha256=sCqdNF-RPgP94u1BiWaHzBogU8atvhQuwDz_awgnIz0,1562517 +virtualenv/seed/wheels/embed/__init__.py,sha256=CLMKoeveDRyiNAdZjEtD38cepgNXkg65xzFu5OSHEus,1995 +virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl,sha256=mTE08EdUcbkUUsoCnUOQ3I8pisY6cSgU8QHNG220ZnY,1360957 +virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl,sha256=Ql55sgk5q7_6djOpEVGogq7cd1ZNkxPjWE6wQWwoxVg,1518513 +virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl,sha256=pn-qUVGe8ozYJhr_DiIbbkw3D4-4utqKo-etiUUZmWM,583228 virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl,sha256=J6cUwJJTE05gpvpoEw94xwN-VWLE8h-PMY8q6QDRUtU,583493 virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whl,sha256=LCQqCFb7rX775WDfSnrdkyTzQM9I30NlHpYEkkRmeUo,785194 -virtualenv/seed/wheels/embed/setuptools-57.1.0-py3-none-any.whl,sha256=3a5MG5Ig2vHjK6nU43FN9gGcW1g3VVWb6E_4GZ9-H-M,818939 -virtualenv/seed/wheels/embed/setuptools-57.4.0-py3-none-any.whl,sha256=pJIwl3qmz7nZM2FNL3t5A26ZRcTN11gxY_TpILg0GNY,819017 -virtualenv/seed/wheels/embed/wheel-0.36.2-py2.py3-none-any.whl,sha256=eLWxhfDldjwmyh4yQ3Oq3UkYLKkOgl94U_SyUJIV3A4,35046 +virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl,sha256=jBd5NiFZRcmjfvgJraD6s2UZGVL3oSNhhDK7-sNTxSk,785164 +virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl,sha256=9NoXY9O-zy4s2SoUp8kg8PAOyjD93p6pksg2aFufryg,21556 
+virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl,sha256=kGhk-3IsCrXy-cNbLGXjrzwAlALBCKcJwKyie8LJGHs,34788 virtualenv/util/__init__.py,sha256=om6Hs2lH5igf5lkcSmQFiU7iMZ0Wx4dmSlMc6XW_Llg,199 virtualenv/util/error.py,sha256=SRSZlXvMYQuJwxoUfNhlAyo3VwrAnIsZemSwPOxpjns,352 virtualenv/util/lock.py,sha256=oFa0FcbE_TVDHOol44Mgtfa4D3ZjnVy-HSQx-y7ERKQ,4727 virtualenv/util/six.py,sha256=_8KWXUWi3-AaFmz4LkdyNra-uNuf70vlxwjN7oeRo8g,1463 virtualenv/util/zipapp.py,sha256=jtf4Vn7XBnjPs_B_ObIQv_x4pFlIlPKAWHYLFV59h6U,1054 -virtualenv/util/path/__init__.py,sha256=2aA04ZAJ53nk3ZKPxvL_DvOwismyH8r_WCujrObxT5o,401 +virtualenv/util/path/__init__.py,sha256=YaBAxtzGBdMu0uUtppe0ZeCHw5HhO-5zjeb3-fzyMoI,336 virtualenv/util/path/_permission.py,sha256=XpO2vGAk_92_biD4MEQcAQq2Zc8_rpm3M3n_hMUA1rw,745 virtualenv/util/path/_sync.py,sha256=rheUrGsCqmhMwNs-uc5rDthNSUlsOrBJPoK8KZj3O1o,2393 -virtualenv/util/path/_win.py,sha256=cxXH1z5pyqwOlcq8PmRC38ASfB1QZjEyPEF3aQmXRb8,709 -virtualenv/util/path/_pathlib/__init__.py,sha256=XqzX7bagsBo3dNdzW8TTlkNzQf-pjSxvWbVPFSYUvb0,340 +virtualenv/util/path/_pathlib/__init__.py,sha256=FjKCi8scB5MnHg2fLX5REoE0bOPkMXqpBEILVTeJZGQ,2130 virtualenv/util/path/_pathlib/via_os_path.py,sha256=fYDFAX483zVvC9hAOAC9FYtrGdZethS0vtYtKsL5r-s,3772 virtualenv/util/subprocess/__init__.py,sha256=1UmFrdBv2sVeUfZbDcO2yZpe28AE0ULOu9dRKlpJaa0,801 -virtualenv/util/subprocess/_win_subprocess.py,sha256=nnYCcATKY_5ektDgGlk6OTdDQNyF_onbpfzCf13J5Qs,5697 -virtualenv-20.7.0.dist-info/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074 -virtualenv-20.7.0.dist-info/METADATA,sha256=dhxbc0FJy4pcp6b8Y4pafU9h4tQgbXYdLDiREBwsDIA,4840 -virtualenv-20.7.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 -virtualenv-20.7.0.dist-info/entry_points.txt,sha256=T4j5uph6UQbyUwjOEF1tH719zxcyqXnLdOHD3dG64yE,1564 -virtualenv-20.7.0.dist-info/top_level.txt,sha256=JV-LVlC8YeIw1DgiYI0hEot7tgFy5IWdKVcSG7NyzaI,11 -virtualenv-20.7.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -virtualenv-20.7.0.dist-info/RECORD,, +virtualenv/util/subprocess/_win_subprocess.py,sha256=SChkXAKVbpehyrHod1ld76RSdTIalrgME1rtz5jUfm0,5655 +virtualenv-20.2.2.dist-info/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074 +virtualenv-20.2.2.dist-info/METADATA,sha256=OWyC_GXU3AvST-YiGhmI2iE4ntdcBm-6Q1yCaU9Bx_U,4965 +virtualenv-20.2.2.dist-info/WHEEL,sha256=oh0NKYrTcu1i1-wgrI1cnhkjYIi8WJ-8qd9Jrr5_y4E,110 +virtualenv-20.2.2.dist-info/entry_points.txt,sha256=1DALKzYOcffJa7Q15TQlMQu0yeFXEy5W124y0aJEfYU,1615 +virtualenv-20.2.2.dist-info/top_level.txt,sha256=JV-LVlC8YeIw1DgiYI0hEot7tgFy5IWdKVcSG7NyzaI,11 +virtualenv-20.2.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +virtualenv-20.2.2.dist-info/RECORD,, diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/WHEEL b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/WHEEL similarity index 100% rename from third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.1-py2.py3-none-any/importlib_resources-3.3.1.dist-info/WHEEL rename to third_party/python/virtualenv/virtualenv-20.2.2.dist-info/WHEEL diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/entry_points.txt b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt similarity index 96% rename from third_party/python/virtualenv/virtualenv-20.7.0.dist-info/entry_points.txt rename to 
third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt index 8f41e7d8280f..3effb4ba117a 100644 --- a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/entry_points.txt +++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt @@ -8,6 +8,7 @@ cshell = virtualenv.activation.cshell:CShellActivator fish = virtualenv.activation.fish:FishActivator powershell = virtualenv.activation.powershell:PowerShellActivator python = virtualenv.activation.python:PythonActivator +xonsh = virtualenv.activation.xonsh:XonshActivator [virtualenv.create] cpython2-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython2macOsFramework diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/top_level.txt b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/top_level.txt similarity index 100% rename from third_party/python/virtualenv/virtualenv-20.7.0.dist-info/top_level.txt rename to third_party/python/virtualenv/virtualenv-20.2.2.dist-info/top_level.txt diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/zip-safe b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/zip-safe similarity index 100% rename from third_party/python/virtualenv/virtualenv-20.7.0.dist-info/zip-safe rename to third_party/python/virtualenv/virtualenv-20.2.2.dist-info/zip-safe diff --git a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/WHEEL b/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/WHEEL deleted file mode 100644 index 01b8fc7d4a10..000000000000 --- a/third_party/python/virtualenv/virtualenv-20.7.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/third_party/python/virtualenv/virtualenv/__main__.py b/third_party/python/virtualenv/virtualenv/__main__.py index 3b06fd747ceb..0995e4c18bc8 100644 --- a/third_party/python/virtualenv/virtualenv/__main__.py +++ b/third_party/python/virtualenv/virtualenv/__main__.py @@ -1,13 +1,11 @@ from __future__ import absolute_import, print_function, unicode_literals import logging -import os import sys from datetime import datetime -def run(args=None, options=None, env=None): - env = os.environ if env is None else env +def run(args=None, options=None): start = datetime.now() from virtualenv.run import cli_run from virtualenv.util.error import ProcessCallFailed @@ -15,7 +13,7 @@ def run(args=None, options=None, env=None): if args is None: args = sys.argv[1:] try: - session = cli_run(args, options, env) + session = cli_run(args, options) logging.warning(LogSession(session, start)) except ProcessCallFailed as exception: print("subprocess call failed for {} with code {}".format(exception.cmd, exception.code)) @@ -56,13 +54,12 @@ class LogSession(object): return "\n".join(lines) -def run_with_catch(args=None, env=None): +def run_with_catch(args=None): from virtualenv.config.cli.parser import VirtualEnvOptions - env = os.environ if env is None else env options = VirtualEnvOptions() try: - run(args, options, env) + run(args, options) except (KeyboardInterrupt, SystemExit, Exception) as exception: try: if getattr(options, "with_traceback", False): diff --git a/third_party/python/virtualenv/virtualenv/activation/__init__.py b/third_party/python/virtualenv/virtualenv/activation/__init__.py index 962cdf7942d1..fa2f0b4af78b 100644 --- a/third_party/python/virtualenv/virtualenv/activation/__init__.py +++ 
b/third_party/python/virtualenv/virtualenv/activation/__init__.py @@ -6,10 +6,12 @@ from .cshell import CShellActivator from .fish import FishActivator from .powershell import PowerShellActivator from .python import PythonActivator +from .xonsh import XonshActivator __all__ = [ "BashActivator", "PowerShellActivator", + "XonshActivator", "CShellActivator", "PythonActivator", "BatchActivator", diff --git a/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh b/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh index dd7956ef3755..222d98204351 100644 --- a/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh +++ b/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh @@ -8,7 +8,7 @@ if [ "${BASH_SOURCE-}" = "$0" ]; then fi deactivate () { - unset -f pydoc >/dev/null 2>&1 || true + unset -f pydoc >/dev/null 2>&1 # reset old environment variables # ! [ -z ${VAR+_} ] returns true if VAR is declared at all @@ -23,10 +23,12 @@ deactivate () { unset _OLD_VIRTUAL_PYTHONHOME fi - # The hash command must be called to get it to forget past - # commands. Without forgetting past commands the $PATH changes - # we made may not be respected - hash -r 2>/dev/null + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then + hash -r 2>/dev/null + fi if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then PS1="$_OLD_VIRTUAL_PS1" @@ -77,7 +79,9 @@ pydoc () { python -m pydoc "$@" } -# The hash command must be called to get it to forget past -# commands. Without forgetting past commands the $PATH changes -# we made may not be respected -hash -r 2>/dev/null +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. 
Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then + hash -r 2>/dev/null +fi diff --git a/third_party/python/virtualenv/virtualenv/app_data/__init__.py b/third_party/python/virtualenv/virtualenv/app_data/__init__.py index e56e63d532e4..2df0cae5d346 100644 --- a/third_party/python/virtualenv/virtualenv/app_data/__init__.py +++ b/third_party/python/virtualenv/virtualenv/app_data/__init__.py @@ -6,7 +6,7 @@ from __future__ import absolute_import, unicode_literals import logging import os -from platformdirs import user_data_dir +from appdirs import user_data_dir from .na import AppDataDisabled from .read_only import ReadOnlyAppData @@ -14,22 +14,21 @@ from .via_disk_folder import AppDataDiskFolder from .via_tempdir import TempAppData -def _default_app_data_dir(env): +def _default_app_data_dir(): # type: () -> str key = str("VIRTUALENV_OVERRIDE_APP_DATA") - if key in env: - return env[key] + if key in os.environ: + return os.environ[key] else: return user_data_dir(appname="virtualenv", appauthor="pypa") def make_app_data(folder, **kwargs): read_only = kwargs.pop("read_only") - env = kwargs.pop("env") if kwargs: # py3+ kwonly raise TypeError("unexpected keywords: {}") if folder is None: - folder = _default_app_data_dir(env) + folder = _default_app_data_dir() folder = os.path.abspath(folder) if read_only: diff --git a/third_party/python/virtualenv/virtualenv/app_data/na.py b/third_party/python/virtualenv/virtualenv/app_data/na.py index d5897871f9fd..5f7200d3a3e6 100644 --- a/third_party/python/virtualenv/virtualenv/app_data/na.py +++ b/third_party/python/virtualenv/virtualenv/app_data/na.py @@ -44,7 +44,7 @@ class AppDataDisabled(AppData): raise self.error def py_info_clear(self): - """ """ + """""" class ContentStoreNA(ContentStore): @@ -52,14 +52,14 @@ class ContentStoreNA(ContentStore): return False def read(self): - """ """ + """""" return None def write(self, content): - """ """ + """""" def remove(self): - """ """ + """""" @contextmanager def locked(self): diff --git a/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py b/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py index 257a85fd5c9b..2243f1670e77 100644 --- a/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py +++ b/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py @@ -91,7 +91,7 @@ class AppDataDiskFolder(AppData): return PyInfoStoreDisk(self.py_info_at, path) def py_info_clear(self): - """ """ + """""" py_info_folder = self.py_info_at with py_info_folder: for filename in py_info_folder.path.iterdir(): diff --git a/third_party/python/virtualenv/virtualenv/config/cli/parser.py b/third_party/python/virtualenv/virtualenv/config/cli/parser.py index c8e2f551f6c3..eb4db30a70c9 100644 --- a/third_party/python/virtualenv/virtualenv/config/cli/parser.py +++ b/third_party/python/virtualenv/virtualenv/config/cli/parser.py @@ -1,6 +1,5 @@ from __future__ import absolute_import, unicode_literals -import os from argparse import SUPPRESS, ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace from collections import OrderedDict @@ -48,11 +47,9 @@ class VirtualEnvConfigParser(ArgumentParser): Custom option parser which updates its defaults by checking the configuration files and environmental variables """ - def __init__(self, options=None, env=None, *args, **kwargs): - env = os.environ if env is None else env - self.file_config = IniConfig(env) + def __init__(self, options=None, *args, 
**kwargs): + self.file_config = IniConfig() self.epilog_list = [] - self.env = env kwargs["epilog"] = self.file_config.epilog kwargs["add_help"] = False kwargs["formatter_class"] = HelpFormatter @@ -78,7 +75,7 @@ class VirtualEnvConfigParser(ArgumentParser): names = OrderedDict((i.lstrip("-").replace("-", "_"), None) for i in action.option_strings) outcome = None for name in names: - outcome = get_env_var(name, as_type, self.env) + outcome = get_env_var(name, as_type) if outcome is not None: break if outcome is None and self.file_config: @@ -104,7 +101,6 @@ class VirtualEnvConfigParser(ArgumentParser): self._fix_defaults() self.options._src = "cli" try: - namespace.env = self.env return super(VirtualEnvConfigParser, self).parse_known_args(args, namespace=namespace) finally: self.options._src = None diff --git a/third_party/python/virtualenv/virtualenv/config/convert.py b/third_party/python/virtualenv/virtualenv/config/convert.py index df408663dbf5..562720a57e39 100644 --- a/third_party/python/virtualenv/virtualenv/config/convert.py +++ b/third_party/python/virtualenv/virtualenv/config/convert.py @@ -43,7 +43,7 @@ class NoneType(TypeData): class ListType(TypeData): def _validate(self): - """ """ + """""" def convert(self, value, flatten=True): values = self.split_values(value) diff --git a/third_party/python/virtualenv/virtualenv/config/env_var.py b/third_party/python/virtualenv/virtualenv/config/env_var.py index 8f6211caef08..259399a70526 100644 --- a/third_party/python/virtualenv/virtualenv/config/env_var.py +++ b/third_party/python/virtualenv/virtualenv/config/env_var.py @@ -1,21 +1,22 @@ from __future__ import absolute_import, unicode_literals +import os + from virtualenv.util.six import ensure_str, ensure_text from .convert import convert -def get_env_var(key, as_type, env): +def get_env_var(key, as_type): """Get the environment variable option. 
:param key: the config key requested :param as_type: the type we would like to convert it to - :param env: environment variables to use :return: """ environ_key = ensure_str("VIRTUALENV_{}".format(key.upper())) - if env.get(environ_key): - value = env[environ_key] + if os.environ.get(environ_key): + value = os.environ[environ_key] # noinspection PyBroadException try: source = "env var {}".format(ensure_text(environ_key)) diff --git a/third_party/python/virtualenv/virtualenv/config/ini.py b/third_party/python/virtualenv/virtualenv/config/ini.py index 0d945ee258c8..4dec629a97a4 100644 --- a/third_party/python/virtualenv/virtualenv/config/ini.py +++ b/third_party/python/virtualenv/virtualenv/config/ini.py @@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals import logging import os -from platformdirs import user_config_dir +from appdirs import user_config_dir from virtualenv.info import PY3 from virtualenv.util import ConfigParser @@ -19,9 +19,8 @@ class IniConfig(object): section = "virtualenv" - def __init__(self, env=None): - env = os.environ if env is None else env - config_file = env.get(self.VIRTUALENV_CONFIG_FILE_ENV_VAR, None) + def __init__(self): + config_file = os.environ.get(self.VIRTUALENV_CONFIG_FILE_ENV_VAR, None) self.is_env_var = config_file is not None config_file = ( Path(config_file) diff --git a/third_party/python/virtualenv/virtualenv/create/creator.py b/third_party/python/virtualenv/virtualenv/create/creator.py index 6363f8b7e050..1b4ea69f6677 100644 --- a/third_party/python/virtualenv/virtualenv/create/creator.py +++ b/third_party/python/virtualenv/virtualenv/create/creator.py @@ -47,7 +47,6 @@ class Creator(object): self.no_vcs_ignore = options.no_vcs_ignore self.pyenv_cfg = PyEnvCfg.from_folder(self.dest) self.app_data = options.app_data - self.env = options.env def __repr__(self): return ensure_str(self.__unicode__()) @@ -205,7 +204,7 @@ class Creator(object): :return: debug information about the virtual environment (only valid after :meth:`create` has run) """ if self._debug is None and self.exe is not None: - self._debug = get_env_debug_info(self.exe, self.debug_script(), self.app_data, self.env) + self._debug = get_env_debug_info(self.exe, self.debug_script(), self.app_data) return self._debug # noinspection PyMethodMayBeStatic @@ -213,8 +212,8 @@ class Creator(object): return DEBUG_SCRIPT -def get_env_debug_info(env_exe, debug_script, app_data, env): - env = env.copy() +def get_env_debug_info(env_exe, debug_script, app_data): + env = os.environ.copy() env.pop(str("PYTHONPATH"), None) with app_data.ensure_extracted(debug_script) as debug_script: diff --git a/third_party/python/virtualenv/virtualenv/create/describe.py b/third_party/python/virtualenv/virtualenv/create/describe.py index 6f05ff1e296b..1e59aaeae04f 100644 --- a/third_party/python/virtualenv/virtualenv/create/describe.py +++ b/third_party/python/virtualenv/virtualenv/create/describe.py @@ -30,15 +30,15 @@ class Describe(object): @property def script_dir(self): - return self.dest / self.interpreter.install_path("scripts") + return self.dest / Path(self.interpreter.distutils_install["scripts"]) @property def purelib(self): - return self.dest / self.interpreter.install_path("purelib") + return self.dest / self.interpreter.distutils_install["purelib"] @property def platlib(self): - return self.dest / self.interpreter.install_path("platlib") + return self.dest / self.interpreter.distutils_install["platlib"] @property def libs(self): diff --git 
a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py index dc822bcb91df..555b0c50fc15 100644 --- a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py +++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py @@ -36,7 +36,7 @@ class CPython2(CPython, Python2): @property def include(self): # the pattern include the distribution name too at the end, remove that via the parent call - return (self.dest / self.interpreter.install_path("headers")).parent + return (self.dest / self.interpreter.distutils_install["headers"]).parent @classmethod def modules(cls): diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py index fcd92b82f61f..19385095f7fe 100644 --- a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py +++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py @@ -15,7 +15,7 @@ from .common import CPython, CPythonPosix, CPythonWindows, is_mac_os_framework @add_metaclass(abc.ABCMeta) class CPython3(CPython, Python3Supports): - """ """ + """""" class CPython3Posix(CPythonPosix, CPython3): @@ -43,7 +43,7 @@ class CPython3Posix(CPythonPosix, CPython3): class CPython3Windows(CPythonWindows, CPython3): - """ """ + """""" @classmethod def setup_meta(cls, interpreter): @@ -55,27 +55,20 @@ class CPython3Windows(CPythonWindows, CPython3): def sources(cls, interpreter): for src in super(CPython3Windows, cls).sources(interpreter): yield src - if not cls.has_shim(interpreter): + if not cls.venv_37p(interpreter): for src in cls.include_dll_and_pyd(interpreter): yield src - @classmethod - def has_shim(cls, interpreter): - return interpreter.version_info.minor >= 7 and cls.shim(interpreter) is not None - - @classmethod - def shim(cls, interpreter): - shim = Path(interpreter.system_stdlib) / "venv" / "scripts" / "nt" / "python.exe" - if shim.exists(): - return shim - return None + @staticmethod + def venv_37p(interpreter): + return interpreter.version_info.minor >= 7 @classmethod def host_python(cls, interpreter): - if cls.has_shim(interpreter): + if cls.venv_37p(interpreter): # starting with CPython 3.7 Windows ships with a venvlauncher.exe that avoids the need for dll/pyd copies # it also means the wrapper must be copied to avoid bugs such as https://bugs.python.org/issue42013 - return cls.shim(interpreter) + return Path(interpreter.system_stdlib) / "venv" / "scripts" / "nt" / "python.exe" return super(CPython3Windows, cls).host_python(interpreter) @classmethod diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py index 9249442478d2..020000b34221 100644 --- a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py +++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py @@ -16,7 +16,7 @@ from .common import PyPy @add_metaclass(abc.ABCMeta) class PyPy2(PyPy, Python2): - """ """ + """""" @classmethod def exe_stem(cls): @@ -41,7 +41,7 @@ class PyPy2(PyPy, Python2): @property def include(self): - return self.dest / self.interpreter.install_path("headers") + return self.dest / 
self.interpreter.distutils_install["headers"] @classmethod def modules(cls): @@ -112,7 +112,7 @@ class Pypy2Windows(PyPy2, WindowsSupports): @classmethod def _shared_libs(cls): - return ["libpypy-c.dll", "libffi-7.dll"] + return ["libpypy-c.dll"] @classmethod def sources(cls, interpreter): diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py index be5319a2b3bf..9588706786b9 100644 --- a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py +++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py @@ -17,6 +17,14 @@ class PyPy3(PyPy, Python3Supports): def exe_stem(cls): return "pypy3" + @property + def stdlib(self): + """ + PyPy3 seems to respect sysconfig only for the host python... + virtual environments purelib is instead lib/pythonx.y + """ + return self.dest / "lib" / "python{}".format(self.interpreter.version_release_str) / "site-packages" + @classmethod def exe_names(cls, interpreter): return super(PyPy3, cls).exe_names(interpreter) | {"pypy"} @@ -25,11 +33,6 @@ class PyPy3(PyPy, Python3Supports): class PyPy3Posix(PyPy3, PosixSupports): """PyPy 2 on POSIX""" - @property - def stdlib(self): - """PyPy3 respects sysconfig only for the host python, virtual envs is instead lib/pythonx.y/site-packages""" - return self.dest / "lib" / "python{}".format(self.interpreter.version_release_str) / "site-packages" - @classmethod def _shared_libs(cls): return ["libpypy3-c.so", "libpypy3-c.dylib"] @@ -50,11 +53,6 @@ class PyPy3Posix(PyPy3, PosixSupports): class Pypy3Windows(PyPy3, WindowsSupports): """PyPy 2 on Windows""" - @property - def stdlib(self): - """PyPy3 respects sysconfig only for the host python, virtual envs is instead Lib/site-packages""" - return self.dest / "Lib" / "site-packages" - @property def bin_dir(self): """PyPy3 needs to fallback to pypy definition""" @@ -62,4 +60,4 @@ class Pypy3Windows(PyPy3, WindowsSupports): @classmethod def _shared_libs(cls): - return ["libpypy3-c.dll", "libffi-7.dll"] + return ["libpypy3-c.dll"] diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py index 4decd87338e2..85eee842aed8 100644 --- a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py +++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py @@ -9,7 +9,6 @@ import sys def main(): """Patch what needed, and invoke the original site.py""" - here = __file__ # the distutils.install patterns will be injected relative to this site.py, save it here config = read_pyvenv() sys.real_prefix = sys.base_prefix = config["base-prefix"] sys.base_exec_prefix = config["base-exec-prefix"] @@ -17,13 +16,12 @@ def main(): global_site_package_enabled = config.get("include-system-site-packages", False) == "true" rewrite_standard_library_sys_path() disable_user_site_package() - load_host_site(here) + load_host_site() if global_site_package_enabled: add_global_site_package() - rewrite_getsitepackages(here) -def load_host_site(here): +def load_host_site(): """trigger reload of site.py - now it will use the standard library instance that will take care of init""" # we have a duality here, we generate the platform and pure library path based on what distutils.install specifies # because this is what pip will be using; the host 
site.py though may contain it's own pattern for where the @@ -38,26 +36,23 @@ def load_host_site(here): # to facilitate when the two match, or not we first reload the site.py, now triggering the import of host site.py, # as this will ensure that initialization code within host site.py runs + here = __file__ # the distutils.install patterns will be injected relative to this site.py, save it here + # ___RELOAD_CODE___ # and then if the distutils site packages are not on the sys.path we add them via add_site_dir; note we must add # them by invoking add_site_dir to trigger the processing of pth files - - add_site_dir = sys.modules["site"].addsitedir - for path in get_site_packages_dirs(here): - add_site_dir(path) - - -def get_site_packages_dirs(here): - import json import os site_packages = r""" ___EXPECTED_SITE_PACKAGES___ """ + import json + add_site_dir = sys.modules["site"].addsitedir for path in json.loads(site_packages): - yield os.path.abspath(os.path.join(here, path.encode("utf-8"))) + full_path = os.path.abspath(os.path.join(here, path.encode("utf-8"))) + add_site_dir(full_path) sep = "\\" if sys.platform == "win32" else "/" # no os module here yet - poor mans version @@ -163,28 +158,7 @@ def add_global_site_package(): site.PREFIXES = [sys.base_prefix, sys.base_exec_prefix] site.main() finally: - site.PREFIXES = orig_prefixes + site.PREFIXES - - -# Debian and it's derivatives patch this function. We undo the damage -def rewrite_getsitepackages(here): - site = sys.modules["site"] - - site_package_dirs = get_site_packages_dirs(here) - orig_getsitepackages = site.getsitepackages - - def getsitepackages(): - sitepackages = orig_getsitepackages() - if sys.prefix not in site.PREFIXES or sys.exec_prefix not in site.PREFIXES: - # Someone messed with the prefixes, so we stop patching - return sitepackages - for path in site_package_dirs: - if path not in sitepackages: - sitepackages.insert(0, path) - - return sitepackages - - site.getsitepackages = getsitepackages + site.PREFIXES = orig_prefixes main() diff --git a/third_party/python/virtualenv/virtualenv/discovery/builtin.py b/third_party/python/virtualenv/virtualenv/discovery/builtin.py index 52f7398cfd46..b66ecb193213 100644 --- a/third_party/python/virtualenv/virtualenv/discovery/builtin.py +++ b/third_party/python/virtualenv/virtualenv/discovery/builtin.py @@ -17,7 +17,6 @@ class Builtin(Discover): super(Builtin, self).__init__(options) self.python_spec = options.python if options.python else [sys.executable] self.app_data = options.app_data - self.try_first_with = options.try_first_with @classmethod def add_parser_arguments(cls, parser): @@ -32,19 +31,10 @@ class Builtin(Discover): help="interpreter based on what to create environment (path/identifier) " "- by default use the interpreter where the tool is installed - first found wins", ) - parser.add_argument( - "--try-first-with", - dest="try_first_with", - metavar="py_exe", - type=str, - action="append", - default=[], - help="try first these interpreters before starting the discovery", - ) def run(self): for python_spec in self.python_spec: - result = get_interpreter(python_spec, self.try_first_with, self.app_data, self._env) + result = get_interpreter(python_spec, self.app_data) if result is not None: return result return None @@ -57,12 +47,11 @@ class Builtin(Discover): return "{} discover of python_spec={!r}".format(self.__class__.__name__, spec) -def get_interpreter(key, try_first_with, app_data=None, env=None): +def get_interpreter(key, app_data=None): spec = 
PythonSpec.from_string_spec(key) logging.info("find interpreter for spec %r", spec) proposed_paths = set() - env = os.environ if env is None else env - for interpreter, impl_must_match in propose_interpreters(spec, try_first_with, app_data, env): + for interpreter, impl_must_match in propose_interpreters(spec, app_data): key = interpreter.system_executable, impl_must_match if key in proposed_paths: continue @@ -73,18 +62,7 @@ def get_interpreter(key, try_first_with, app_data=None, env=None): proposed_paths.add(key) -def propose_interpreters(spec, try_first_with, app_data, env=None): - # 0. try with first - env = os.environ if env is None else env - for py_exe in try_first_with: - path = os.path.abspath(py_exe) - try: - os.lstat(path) # Windows Store Python does not work with os.path.exists, but does for os.lstat - except OSError: - pass - else: - yield PythonInfo.from_exe(os.path.abspath(path), app_data, env=env), True - +def propose_interpreters(spec, app_data): # 1. if it's a path and exists if spec.path is not None: try: @@ -93,7 +71,7 @@ def propose_interpreters(spec, try_first_with, app_data, env=None): if spec.is_abs: raise else: - yield PythonInfo.from_exe(os.path.abspath(spec.path), app_data, env=env), True + yield PythonInfo.from_exe(os.path.abspath(spec.path), app_data), True if spec.is_abs: return else: @@ -104,27 +82,27 @@ def propose_interpreters(spec, try_first_with, app_data, env=None): if IS_WIN: from .windows import propose_interpreters - for interpreter in propose_interpreters(spec, app_data, env): + for interpreter in propose_interpreters(spec, app_data): yield interpreter, True # finally just find on path, the path order matters (as the candidates are less easy to control by end user) - paths = get_paths(env) + paths = get_paths() tested_exes = set() for pos, path in enumerate(paths): path = ensure_text(path) - logging.debug(LazyPathDump(pos, path, env)) + logging.debug(LazyPathDump(pos, path)) for candidate, match in possible_specs(spec): found = check_path(candidate, path) if found is not None: exe = os.path.abspath(found) if exe not in tested_exes: tested_exes.add(exe) - interpreter = PathPythonInfo.from_exe(exe, app_data, raise_on_error=False, env=env) + interpreter = PathPythonInfo.from_exe(exe, app_data, raise_on_error=False) if interpreter is not None: yield interpreter, match -def get_paths(env): - path = env.get(str("PATH"), None) +def get_paths(): + path = os.environ.get(str("PATH"), None) if path is None: try: path = os.confstr("CS_PATH") @@ -138,17 +116,16 @@ def get_paths(env): class LazyPathDump(object): - def __init__(self, pos, path, env): + def __init__(self, pos, path): self.pos = pos self.path = path - self.env = env def __repr__(self): return ensure_str(self.__unicode__()) def __unicode__(self): content = "discover PATH[{}]={}".format(self.pos, self.path) - if self.env.get(str("_VIRTUALENV_DEBUG")): # this is the over the board debug + if os.environ.get(str("_VIRTUALENV_DEBUG")): # this is the over the board debug content += " with =>" for file_name in os.listdir(self.path): try: @@ -183,4 +160,4 @@ def possible_specs(spec): class PathPythonInfo(PythonInfo): - """ """ + """""" diff --git a/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py b/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py index 31beff52fec5..ce79ef14b1f8 100644 --- a/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py +++ b/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py @@ -23,9 +23,9 @@ _CACHE = 
OrderedDict() _CACHE[Path(sys.executable)] = PythonInfo() -def from_exe(cls, app_data, exe, env=None, raise_on_error=True, ignore_cache=False): - env = os.environ if env is None else env - result = _get_from_cache(cls, app_data, exe, env, ignore_cache=ignore_cache) +def from_exe(cls, app_data, exe, raise_on_error=True, ignore_cache=False): + """""" + result = _get_from_cache(cls, app_data, exe, ignore_cache=ignore_cache) if isinstance(result, Exception): if raise_on_error: raise result @@ -35,14 +35,14 @@ def from_exe(cls, app_data, exe, env=None, raise_on_error=True, ignore_cache=Fal return result -def _get_from_cache(cls, app_data, exe, env, ignore_cache=True): +def _get_from_cache(cls, app_data, exe, ignore_cache=True): # note here we cannot resolve symlinks, as the symlink may trigger different prefix information if there's a - # pyenv.cfg somewhere alongside on python3.5+ + # pyenv.cfg somewhere alongside on python3.4+ exe_path = Path(exe) if not ignore_cache and exe_path in _CACHE: # check in the in-memory cache result = _CACHE[exe_path] else: # otherwise go through the app data cache - py_info = _get_via_file_cache(cls, app_data, exe_path, exe, env) + py_info = _get_via_file_cache(cls, app_data, exe_path, exe) result = _CACHE[exe_path] = py_info # independent if it was from the file or in-memory cache fix the original executable location if isinstance(result, PythonInfo): @@ -50,7 +50,7 @@ def _get_from_cache(cls, app_data, exe, env, ignore_cache=True): return result -def _get_via_file_cache(cls, app_data, path, exe, env): +def _get_via_file_cache(cls, app_data, path, exe): path_text = ensure_text(str(path)) try: path_modified = path.stat().st_mtime @@ -65,14 +65,10 @@ def _get_via_file_cache(cls, app_data, path, exe, env): of_path, of_st_mtime, of_content = data["path"], data["st_mtime"], data["content"] if of_path == path_text and of_st_mtime == path_modified: py_info = cls._from_dict({k: v for k, v in of_content.items()}) - sys_exe = py_info.system_executable - if sys_exe is not None and not os.path.exists(sys_exe): - py_info_store.remove() - py_info = None else: py_info_store.remove() if py_info is None: # if not loaded run and save - failure, py_info = _run_subprocess(cls, exe, app_data, env) + failure, py_info = _run_subprocess(cls, exe, app_data) if failure is None: data = {"st_mtime": path_modified, "path": path_text, "content": py_info._to_dict()} py_info_store.write(data) @@ -81,12 +77,12 @@ def _get_via_file_cache(cls, app_data, path, exe, env): return py_info -def _run_subprocess(cls, exe, app_data, env): +def _run_subprocess(cls, exe, app_data): py_info_script = Path(os.path.abspath(__file__)).parent / "py_info.py" with app_data.ensure_extracted(py_info_script) as py_info_script: cmd = [exe, str(py_info_script)] # prevent sys.prefix from leaking into the child process - see https://bugs.python.org/issue22490 - env = env.copy() + env = os.environ.copy() env.pop("__PYVENV_LAUNCHER__", None) logging.debug("get interpreter info via cmd: %s", LogCmd(cmd)) try: diff --git a/third_party/python/virtualenv/virtualenv/discovery/discover.py b/third_party/python/virtualenv/virtualenv/discovery/discover.py index 72748c3fac63..93c3ea7ad744 100644 --- a/third_party/python/virtualenv/virtualenv/discovery/discover.py +++ b/third_party/python/virtualenv/virtualenv/discovery/discover.py @@ -25,7 +25,6 @@ class Discover(object): """ self._has_run = False self._interpreter = None - self._env = options.env @abstractmethod def run(self): diff --git 
a/third_party/python/virtualenv/virtualenv/discovery/py_info.py b/third_party/python/virtualenv/virtualenv/discovery/py_info.py index 9b41d13febf5..46b51df1b3e6 100644 --- a/third_party/python/virtualenv/virtualenv/discovery/py_info.py +++ b/third_party/python/virtualenv/virtualenv/discovery/py_info.py @@ -12,8 +12,9 @@ import platform import re import sys import sysconfig -import warnings from collections import OrderedDict, namedtuple +from distutils import dist +from distutils.command.install import SCHEME_KEYS from string import digits VersionInfo = namedtuple("VersionInfo", ["major", "minor", "micro", "releaselevel", "serial"]) @@ -117,28 +118,10 @@ class PythonInfo(object): # note we must choose the original and not the pure executable as shim scripts might throw us off return self.original_executable - def install_path(self, key): - result = self.distutils_install.get(key) - if result is None: # use sysconfig if distutils is unavailable - # set prefixes to empty => result is relative from cwd - prefixes = self.prefix, self.exec_prefix, self.base_prefix, self.base_exec_prefix - config_var = {k: "" if v in prefixes else v for k, v in self.sysconfig_vars} - result = self.sysconfig_path(key, config_var=config_var).lstrip(os.sep) - return result - @staticmethod def _distutils_install(): - # use distutils primarily because that's what pip does - # https://github.com/pypa/pip/blob/main/src/pip/_internal/locations.py#L95 + # follow https://github.com/pypa/pip/blob/main/src/pip/_internal/locations.py#L95 # note here we don't import Distribution directly to allow setuptools to patch it - with warnings.catch_warnings(): # disable warning for PEP-632 - warnings.simplefilter("ignore") - try: - from distutils import dist - from distutils.command.install import SCHEME_KEYS - except ImportError: # if removed or not installed ignore - return {} - d = dist.Distribution({"script_args": "--no-user-cfg"}) # conf files not parsed so they do not hijack paths if hasattr(sys, "_framework"): sys._framework = None # disable macOS static paths for framework @@ -194,7 +177,7 @@ class PythonInfo(object): ) if not os.path.exists(path): # some broken packaging don't respect the sysconfig, fallback to distutils path # the pattern include the distribution name too at the end, remove that via the parent call - fallback = os.path.join(self.prefix, os.path.dirname(self.install_path("headers"))) + fallback = os.path.join(self.prefix, os.path.dirname(self.distutils_install["headers"])) if os.path.exists(fallback): path = fallback return path @@ -325,13 +308,12 @@ class PythonInfo(object): return data @classmethod - def from_exe(cls, exe, app_data=None, raise_on_error=True, ignore_cache=False, resolve_to_host=True, env=None): + def from_exe(cls, exe, app_data=None, raise_on_error=True, ignore_cache=False, resolve_to_host=True): """Given a path to an executable get the python information""" # this method is not used by itself, so here and called functions can import stuff locally from virtualenv.discovery.cached_py_info import from_exe - env = os.environ if env is None else env - proposed = from_exe(cls, app_data, exe, env=env, raise_on_error=raise_on_error, ignore_cache=ignore_cache) + proposed = from_exe(cls, app_data, exe, raise_on_error=raise_on_error, ignore_cache=ignore_cache) # noinspection PyProtectedMember if isinstance(proposed, PythonInfo) and resolve_to_host: try: @@ -381,7 +363,7 @@ class PythonInfo(object): _cache_exe_discovery = {} - def discover_exe(self, app_data, prefix, exact=True, env=None): + def 
discover_exe(self, app_data, prefix, exact=True): key = prefix, exact if key in self._cache_exe_discovery and prefix: logging.debug("discover exe from cache %s - exact %s: %r", prefix, exact, self._cache_exe_discovery[key]) @@ -391,10 +373,9 @@ class PythonInfo(object): possible_names = self._find_possible_exe_names() possible_folders = self._find_possible_folders(prefix) discovered = [] - env = os.environ if env is None else env for folder in possible_folders: for name in possible_names: - info = self._check_exe(app_data, folder, name, exact, discovered, env) + info = self._check_exe(app_data, folder, name, exact, discovered) if info is not None: self._cache_exe_discovery[key] = info return info @@ -407,11 +388,11 @@ class PythonInfo(object): msg = "failed to detect {} in {}".format("|".join(possible_names), os.pathsep.join(possible_folders)) raise RuntimeError(msg) - def _check_exe(self, app_data, folder, name, exact, discovered, env): + def _check_exe(self, app_data, folder, name, exact, discovered): exe_path = os.path.join(folder, name) if not os.path.exists(exe_path): return None - info = self.from_exe(exe_path, app_data, resolve_to_host=False, raise_on_error=False, env=env) + info = self.from_exe(exe_path, app_data, resolve_to_host=False, raise_on_error=False) if info is None: # ignore if for some reason we can't query return None for item in ["implementation", "architecture", "version_info"]: diff --git a/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py b/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py index 259be976bdfa..9063ab8df74a 100644 --- a/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py +++ b/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py @@ -6,18 +6,15 @@ from .pep514 import discover_pythons class Pep514PythonInfo(PythonInfo): - """ """ + """""" -def propose_interpreters(spec, cache_dir, env): +def propose_interpreters(spec, cache_dir): # see if PEP-514 entries are good # start with higher python versions in an effort to use the latest version available - # and prefer PythonCore over conda pythons (as virtualenv is mostly used by non conda tools) existing = list(discover_pythons()) - existing.sort( - key=lambda i: tuple(-1 if j is None else j for j in i[1:4]) + (1 if i[0] == "PythonCore" else 0,), reverse=True - ) + existing.sort(key=lambda i: tuple(-1 if j is None else j for j in i[1:4]), reverse=True) for name, major, minor, arch, exe, _ in existing: # pre-filter @@ -25,7 +22,7 @@ def propose_interpreters(spec, cache_dir, env): name = "CPython" registry_spec = PythonSpec(None, name, major, minor, None, arch, exe) if registry_spec.satisfies(spec): - interpreter = Pep514PythonInfo.from_exe(exe, cache_dir, env=env, raise_on_error=False) + interpreter = Pep514PythonInfo.from_exe(exe, cache_dir, raise_on_error=False) if interpreter is not None: if interpreter.satisfies(spec, impl_must_match=True): yield interpreter diff --git a/third_party/python/virtualenv/virtualenv/run/__init__.py b/third_party/python/virtualenv/virtualenv/run/__init__.py index e8e7ab138966..66083df82b7d 100644 --- a/third_party/python/virtualenv/virtualenv/run/__init__.py +++ b/third_party/python/virtualenv/virtualenv/run/__init__.py @@ -1,7 +1,6 @@ from __future__ import absolute_import, unicode_literals import logging -import os from functools import partial from ..app_data import make_app_data @@ -16,24 +15,22 @@ from .plugin.discovery import get_discover from .plugin.seeders import SeederSelector -def 
cli_run(args, options=None, setup_logging=True, env=None): +def cli_run(args, options=None, setup_logging=True): """ Create a virtual environment given some command line interface arguments. :param args: the command line arguments :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options :param setup_logging: ``True`` if setup logging handlers, ``False`` to use handlers already registered - :param env: environment variables to use :return: the session object of the creation (its structure for now is experimental and might change on short notice) """ - env = os.environ if env is None else env - of_session = session_via_cli(args, options, setup_logging, env) + of_session = session_via_cli(args, options, setup_logging) with of_session: of_session.run() return of_session -def session_via_cli(args, options=None, setup_logging=True, env=None): +def session_via_cli(args, options=None, setup_logging=True): """ Create a virtualenv session (same as cli_run, but this does not perform the creation). Use this if you just want to query what the virtual environment would look like, but not actually create it. @@ -41,19 +38,17 @@ def session_via_cli(args, options=None, setup_logging=True, env=None): :param args: the command line arguments :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options :param setup_logging: ``True`` if setup logging handlers, ``False`` to use handlers already registered - :param env: environment variables to use :return: the session object of the creation (its structure for now is experimental and might change on short notice) """ - env = os.environ if env is None else env - parser, elements = build_parser(args, options, setup_logging, env) + parser, elements = build_parser(args, options, setup_logging) options = parser.parse_args(args) creator, seeder, activators = tuple(e.create(options) for e in elements) # create types of_session = Session(options.verbosity, options.app_data, parser._interpreter, creator, seeder, activators) # noqa return of_session -def build_parser(args=None, options=None, setup_logging=True, env=None): - parser = VirtualEnvConfigParser(options, os.environ if env is None else env) +def build_parser(args=None, options=None, setup_logging=True): + parser = VirtualEnvConfigParser(options) add_version_flag(parser) parser.add_argument( "--with-traceback", @@ -89,7 +84,7 @@ def build_parser_only(args=None): def handle_extra_commands(options): if options.upgrade_embed_wheels: - result = manual_upgrade(options.app_data, options.env) + result = manual_upgrade(options.app_data) raise SystemExit(result) @@ -105,8 +100,8 @@ def load_app_data(args, parser, options): parser.add_argument( "--app-data", help="a data folder used as cache by the virtualenv", - type=partial(make_app_data, read_only=options.read_only_app_data, env=options.env), - default=make_app_data(None, read_only=options.read_only_app_data, env=options.env), + type=partial(make_app_data, read_only=options.read_only_app_data), + default=make_app_data(None, read_only=options.read_only_app_data), ) parser.add_argument( "--reset-app-data", diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/base.py b/third_party/python/virtualenv/virtualenv/run/plugin/base.py index f1f4ee0618e3..ed10fe0e2731 100644 --- a/third_party/python/virtualenv/virtualenv/run/plugin/base.py +++ b/third_party/python/virtualenv/virtualenv/run/plugin/base.py @@ -1,8 +1,12 @@ from __future__ import absolute_import, unicode_literals +import sys from 
collections import OrderedDict -from backports.entry_points_selectable import entry_points +if sys.version_info >= (3, 8): + from importlib.metadata import entry_points +else: + from importlib_metadata import entry_points class PluginLoader(object): @@ -11,7 +15,7 @@ class PluginLoader(object): @classmethod def entry_points_for(cls, key): - return OrderedDict((e.name, e.load()) for e in cls.entry_points().select(group=key)) + return OrderedDict((e.name, e.load()) for e in cls.entry_points().get(key, {})) @staticmethod def entry_points(): diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py b/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py index ac9b7f526bf2..3b6fc60d8392 100644 --- a/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py +++ b/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py @@ -4,7 +4,7 @@ from .base import PluginLoader class Discovery(PluginLoader): - """ """ + """""" def get_discover(parser, args): @@ -13,13 +13,10 @@ def get_discover(parser, args): title="discovery", description="discover and provide a target interpreter", ) - choices = _get_default_discovery(discover_types) - # prefer the builtin if present, otherwise fallback to first defined type - choices = sorted(choices, key=lambda a: 0 if a == "builtin" else 1) discovery_parser.add_argument( "--discovery", - choices=choices, - default=next(iter(choices)), + choices=_get_default_discovery(discover_types), + default=next(i for i in discover_types.keys()), required=False, help="interpreter discovery method", ) diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py b/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py index c935c0216813..372e140dc41f 100644 --- a/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py +++ b/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py @@ -19,7 +19,7 @@ class PipInvoke(BaseEmbed): return for_py_version = creator.interpreter.version_release_str with self.get_pip_install_cmd(creator.exe, for_py_version) as cmd: - env = pip_wheel_env_run(self.extra_search_dir, self.app_data, self.env) + env = pip_wheel_env_run(self.extra_search_dir, self.app_data) self._execute(cmd, env) @staticmethod @@ -46,7 +46,6 @@ class PipInvoke(BaseEmbed): download=False, app_data=self.app_data, do_periodic_update=self.periodic_update, - env=self.env, ) if wheel is None: raise RuntimeError("could not get wheel for distribution {}".format(dist)) diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py index 017b7efe105d..a1d946d50920 100644 --- a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py +++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py @@ -51,7 +51,6 @@ class PipInstall(object): # 1. first extract the wheel logging.debug("build install image for %s to %s", self._wheel.name, self._image_dir) with zipfile.ZipFile(str(self._wheel)) as zip_ref: - self._shorten_path_if_needed(zip_ref) zip_ref.extractall(str(self._image_dir)) self._extracted = True # 2. now add additional files not present in the distribution @@ -59,20 +58,6 @@ class PipInstall(object): # 3. 
finally fix the records file self._fix_records(new_files) - def _shorten_path_if_needed(self, zip_ref): - if os.name == "nt": - to_folder = str(self._image_dir) - # https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation - zip_max_len = max(len(i) for i in zip_ref.namelist()) - path_len = zip_max_len + len(to_folder) - if path_len > 260: - self._image_dir.mkdir(exist_ok=True) # to get a short path must exist - - from virtualenv.util.path import get_short_path_name - - to_folder = get_short_path_name(to_folder) - self._image_dir = Path(to_folder) - def _records_text(self, files): record_data = "\n".join( "{},,".format(os.path.relpath(ensure_text(str(rec)), ensure_text(str(self._image_dir)))) for rec in files diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py index 9a98a709fbc4..1afa7978c54f 100644 --- a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py +++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py @@ -89,7 +89,6 @@ class FromAppData(BaseEmbed): download=download, app_data=self.app_data, do_periodic_update=self.periodic_update, - env=self.env, ) if result is not None: break diff --git a/third_party/python/virtualenv/virtualenv/seed/seeder.py b/third_party/python/virtualenv/virtualenv/seed/seeder.py index 852e85254f72..2bcccfc727c9 100644 --- a/third_party/python/virtualenv/virtualenv/seed/seeder.py +++ b/third_party/python/virtualenv/virtualenv/seed/seeder.py @@ -17,7 +17,6 @@ class Seeder(object): :param enabled: a flag weather the seeder is enabled or not """ self.enabled = enabled - self.env = options.env @classmethod def add_parser_arguments(cls, parser, interpreter, app_data): diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py b/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py index 0ee03ec9841c..e63ecb67cf2e 100644 --- a/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py +++ b/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py @@ -2,6 +2,7 @@ from __future__ import absolute_import, unicode_literals import logging +import os import sys from operator import eq, lt @@ -13,13 +14,13 @@ from .bundle import from_bundle from .util import Version, Wheel, discover_wheels -def get_wheel(distribution, version, for_py_version, search_dirs, download, app_data, do_periodic_update, env): +def get_wheel(distribution, version, for_py_version, search_dirs, download, app_data, do_periodic_update): """ Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download """ # not all wheels are compatible with all python versions, so we need to py version qualify it # 1. acquire from bundle - wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env) + wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update) # 2. 
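# Illustrative sketch, not taken from the vendored sources: get_wheel prefers a bundled or
# locally cached wheel and only reaches for the network when a concrete version was asked
# for and downloading is enabled. A stubbed, self-contained mirror of that ordering:
def acquire(distribution, version, download, bundle_lookup, downloader):
    wheel = bundle_lookup(distribution, version)  # 1. bundle / app-data cache first
    if download and version is not None:          # 2. roughly: only for real version pins
        wheel = downloader(distribution, version) or wheel
    return wheel


# acquire("pip", "20.3.1", True, lambda d, v: None, lambda d, v: "pip-20.3.1.whl")
# falls through to the downloader; with download=False it returns whatever the bundle had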
download from the internet if version not in Version.non_version and download: @@ -30,12 +31,11 @@ def get_wheel(distribution, version, for_py_version, search_dirs, download, app_ search_dirs=search_dirs, app_data=app_data, to_folder=app_data.house, - env=env, ) return wheel -def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env): +def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder): to_download = "{}{}".format(distribution, version_spec or "") logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder) cmd = [ @@ -55,7 +55,7 @@ def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_ to_download, ] # pip has no interface in python - must be a new sub-process - env = pip_wheel_env_run(search_dirs, app_data, env) + env = pip_wheel_env_run(search_dirs, app_data) process = Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) out, err = process.communicate() if process.returncode != 0: @@ -96,9 +96,9 @@ def find_compatible_in_house(distribution, version_spec, for_py_version, in_fold return None if start == end else wheels[start] -def pip_wheel_env_run(search_dirs, app_data, env): +def pip_wheel_env_run(search_dirs, app_data): for_py_version = "{}.{}".format(*sys.version_info[0:2]) - env = env.copy() + env = os.environ.copy() env.update( { ensure_str(k): str(v) # python 2 requires these to be string only (non-unicode) @@ -113,7 +113,6 @@ def pip_wheel_env_run(search_dirs, app_data, env): download=False, app_data=app_data, do_periodic_update=False, - env=env, ) if wheel is None: raise RuntimeError("could not find the embedded pip") diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py b/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py index ab2fe5fa5a77..7c664bd389a1 100644 --- a/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py +++ b/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py @@ -5,7 +5,7 @@ from .periodic_update import periodic_update from .util import Version, Wheel, discover_wheels -def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env): +def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update): """ Load the bundled wheel to a cache directory. """ @@ -15,7 +15,7 @@ def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do if version != Version.embed: # 2. check if we have upgraded embed if app_data.can_update: - wheel = periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env) + wheel = periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update) # 3. 
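# Illustrative sketch, not taken from the vendored sources: with the env parameter dropped,
# the environment for the pip subprocess is rebuilt from os.environ and overlaid with
# string-only settings, as pip_wheel_env_run above now does. The PIP_NO_COLOR override here
# is only an example of such a setting, not one virtualenv itself sets.
import os
import subprocess
import sys


def run_pip(args, overrides):
    env = os.environ.copy()
    # keys and values must be plain strings (the vendored code still supports python 2)
    env.update({str(k): str(v) for k, v in overrides.items()})
    return subprocess.check_output([sys.executable, "-m", "pip"] + list(args), env=env)


# run_pip(["--version"], {"PIP_NO_COLOR": "1"})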
acquire from extra search dir found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs) diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py index a3bef65120a1..5233e4876112 100644 --- a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py +++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py @@ -6,39 +6,44 @@ from virtualenv.util.path import Path BUNDLE_FOLDER = Path(__file__).absolute().parent BUNDLE_SUPPORT = { "3.10": { - "pip": "pip-21.2.2-py3-none-any.whl", - "setuptools": "setuptools-57.4.0-py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", + "setuptools": "setuptools-51.0.0-py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", }, "3.9": { - "pip": "pip-21.2.2-py3-none-any.whl", - "setuptools": "setuptools-57.4.0-py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", + "setuptools": "setuptools-51.0.0-py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", }, "3.8": { - "pip": "pip-21.2.2-py3-none-any.whl", - "setuptools": "setuptools-57.4.0-py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", + "setuptools": "setuptools-51.0.0-py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", }, "3.7": { - "pip": "pip-21.2.2-py3-none-any.whl", - "setuptools": "setuptools-57.4.0-py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", + "setuptools": "setuptools-51.0.0-py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", }, "3.6": { - "pip": "pip-21.2.2-py3-none-any.whl", - "setuptools": "setuptools-57.4.0-py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", + "setuptools": "setuptools-51.0.0-py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", }, "3.5": { - "pip": "pip-20.3.4-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", "setuptools": "setuptools-50.3.2-py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", + }, + "3.4": { + "pip": "pip-19.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-43.0.0-py2.py3-none-any.whl", + "wheel": "wheel-0.33.6-py2.py3-none-any.whl", }, "2.7": { - "pip": "pip-20.3.4-py2.py3-none-any.whl", + "pip": "pip-20.3.1-py2.py3-none-any.whl", "setuptools": "setuptools-44.1.1-py2.py3-none-any.whl", - "wheel": "wheel-0.36.2-py2.py3-none-any.whl", + "wheel": "wheel-0.36.1-py2.py3-none-any.whl", }, } MAX = "3.10" diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl new file mode 100644 index 000000000000..8476c119301f Binary files /dev/null and b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.4-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl similarity index 90% rename from third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.4-py2.py3-none-any.whl rename to 
third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl index 95de4d7a5d88..fbac5d3c90f3 100644 Binary files a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.4-py2.py3-none-any.whl and b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-21.1.3-py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-21.1.3-py3-none-any.whl deleted file mode 100644 index d96a40a9291f..000000000000 Binary files a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-21.1.3-py3-none-any.whl and /dev/null differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-21.2.2-py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-21.2.2-py3-none-any.whl deleted file mode 100644 index aea9c37a7b6e..000000000000 Binary files a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-21.2.2-py3-none-any.whl and /dev/null differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl new file mode 100644 index 000000000000..733faa6a546d Binary files /dev/null and b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-57.1.0-py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl similarity index 60% rename from third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-57.1.0-py3-none-any.whl rename to third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl index 063dafc1be8c..7e60e1130595 100644 Binary files a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-57.1.0-py3-none-any.whl and b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-57.4.0-py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-57.4.0-py3-none-any.whl deleted file mode 100644 index af8f8ba21003..000000000000 Binary files a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-57.4.0-py3-none-any.whl and /dev/null differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl new file mode 100644 index 000000000000..2a71896be974 Binary files /dev/null and b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.2-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl similarity index 59% rename from third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.2-py2.py3-none-any.whl rename to third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl index ead39b036e86..1f17303bf91e 100644 Binary files a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.2-py2.py3-none-any.whl and 
b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl differ diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py b/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py index 45584f91bb22..fd0ff4c264ab 100644 --- a/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py +++ b/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py @@ -36,9 +36,9 @@ if PY2: pass # pragma: no cov -def periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env): +def periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update): if do_periodic_update: - handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env) + handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data) now = datetime.now() @@ -57,14 +57,14 @@ def periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, return wheel -def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env): +def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data): embed_update_log = app_data.embed_update_log(distribution, for_py_version) u_log = UpdateLog.from_dict(embed_update_log.read()) if u_log.needs_update: u_log.periodic = True u_log.started = datetime.now() embed_update_log.write(u_log.to_dict()) - trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True, env=env) + trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True) DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ" @@ -169,7 +169,7 @@ class UpdateLog(object): return self.started is None or now - self.started > timedelta(hours=1) -def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, env, periodic): +def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic): wheel_path = None if wheel is None else str(wheel.path) cmd = [ sys.executable, @@ -185,7 +185,7 @@ def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, e .strip() .format(distribution, for_py_version, wheel_path, str(app_data), [str(p) for p in search_dirs], periodic), ] - debug = env.get(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE")) == str("1") + debug = os.environ.get(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE")) == str("1") pipe = None if debug else subprocess.PIPE kwargs = {"stdout": pipe, "stderr": pipe} if not debug and sys.platform == "win32": @@ -236,7 +236,6 @@ def _run_do_update(app_data, distribution, embed_filename, for_py_version, perio search_dirs=search_dirs, app_data=app_data, to_folder=wheelhouse, - env=os.environ, ) if dest is None or (u_log.versions and u_log.versions[0].filename == dest.name): break @@ -302,13 +301,13 @@ def _pypi_get_distribution_info(distribution): return content -def manual_upgrade(app_data, env): +def manual_upgrade(app_data): threads = [] for for_py_version, distribution_to_package in BUNDLE_SUPPORT.items(): # load extra search dir for the given for_py for distribution in distribution_to_package.keys(): - thread = Thread(target=_run_manual_upgrade, args=(app_data, distribution, for_py_version, env)) + thread = Thread(target=_run_manual_upgrade, args=(app_data, distribution, for_py_version)) thread.start() threads.append(thread) @@ -316,7 +315,7 @@ def manual_upgrade(app_data, env): thread.join() -def _run_manual_upgrade(app_data, distribution, for_py_version, env): +def 
_run_manual_upgrade(app_data, distribution, for_py_version): start = datetime.now() from .bundle import from_bundle @@ -327,7 +326,6 @@ def _run_manual_upgrade(app_data, distribution, for_py_version, env): search_dirs=[], app_data=app_data, do_periodic_update=False, - env=env, ) logging.warning( "upgrade %s for python %s with current %s", diff --git a/third_party/python/virtualenv/virtualenv/util/path/__init__.py b/third_party/python/virtualenv/virtualenv/util/path/__init__.py index dc628de835ed..a7f71634b53d 100644 --- a/third_party/python/virtualenv/virtualenv/util/path/__init__.py +++ b/third_party/python/virtualenv/virtualenv/util/path/__init__.py @@ -3,7 +3,6 @@ from __future__ import absolute_import, unicode_literals from ._pathlib import Path from ._permission import make_exe, set_tree from ._sync import copy, copytree, ensure_dir, safe_delete, symlink -from ._win import get_short_path_name __all__ = ( "ensure_dir", @@ -14,5 +13,4 @@ __all__ = ( "make_exe", "set_tree", "safe_delete", - "get_short_path_name", ) diff --git a/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py b/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py index 746c8aed2ccb..6bb045c2d8da 100644 --- a/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py +++ b/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py @@ -6,6 +6,52 @@ import six if six.PY3: from pathlib import Path + + if sys.version_info[0:2] == (3, 4): + # no read/write text on python3.4 + BuiltinPath = Path + + class Path(type(BuiltinPath())): + def read_text(self, encoding=None, errors=None): + """ + Open the file in text mode, read it, and close the file. + """ + with self.open(mode="r", encoding=encoding, errors=errors) as f: + return f.read() + + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode="rb") as f: + return f.read() + + def write_text(self, data, encoding=None, errors=None): + """ + Open the file in text mode, write to it, and close the file. + """ + if not isinstance(data, str): + raise TypeError("data must be str, not %s" % data.__class__.__name__) + with self.open(mode="w", encoding=encoding, errors=errors) as f: + return f.write(data) + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. + """ + # type-check for the buffer interface before truncating the file + view = memoryview(data) + with self.open(mode="wb") as f: + return f.write(view) + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + try: + super(type(BuiltinPath()), self).mkdir(mode, parents) + except FileExistsError as exception: + if not exist_ok: + raise exception + + else: if sys.platform == "win32": # workaround for https://github.com/mcmtroffaes/pathlib2/issues/56 diff --git a/third_party/python/virtualenv/virtualenv/util/path/_win.py b/third_party/python/virtualenv/virtualenv/util/path/_win.py deleted file mode 100644 index 02e16d07e2e3..000000000000 --- a/third_party/python/virtualenv/virtualenv/util/path/_win.py +++ /dev/null @@ -1,19 +0,0 @@ -def get_short_path_name(long_name): - """ - Gets the short path name of a given long path. 
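# Illustrative sketch, not taken from the vendored sources: the python 3.4 Path shim a few
# hunks above emulates mkdir(exist_ok=True) by swallowing FileExistsError; the same pattern
# on a plain path, with a hypothetical directory name in the usage note:
import errno
import os


def mkdir_exist_ok(path, mode=0o777, exist_ok=False):
    try:
        os.mkdir(path, mode)
    except OSError as exc:  # FileExistsError subclasses OSError
        if not (exist_ok and exc.errno == errno.EEXIST):
            raise


# mkdir_exist_ok("/tmp/example-dir", exist_ok=True) is safe to call repeatedly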
- http://stackoverflow.com/a/23598461/200291 - """ - import ctypes - from ctypes import wintypes - - _GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW - _GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, wintypes.DWORD] - _GetShortPathNameW.restype = wintypes.DWORD - output_buf_size = 0 - while True: - output_buf = ctypes.create_unicode_buffer(output_buf_size) - needed = _GetShortPathNameW(long_name, output_buf, output_buf_size) - if output_buf_size >= needed: - return output_buf.value - else: - output_buf_size = needed diff --git a/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py b/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py index ce531979ab84..4c4c5d029529 100644 --- a/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py +++ b/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py @@ -155,8 +155,7 @@ class Popen(subprocess.Popen): args = args.decode('utf-8') startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW startupinfo.wShowWindow = _subprocess.SW_HIDE - env = os.environ if env is None else env - comspec = env.get("COMSPEC", unicode("cmd.exe")) + comspec = os.environ.get("COMSPEC", unicode("cmd.exe")) if ( _subprocess.GetVersion() >= 0x80000000 or os.path.basename(comspec).lower() == "command.com" diff --git a/third_party/python/virtualenv/virtualenv/version.py b/third_party/python/virtualenv/virtualenv/version.py index 7539c54681cb..7f21daf026c0 100644 --- a/third_party/python/virtualenv/virtualenv/version.py +++ b/third_party/python/virtualenv/virtualenv/version.py @@ -1,3 +1,3 @@ from __future__ import unicode_literals -__version__ = "20.7.0" +__version__ = "20.2.2" diff --git a/third_party/python/yarl/CHANGES.rst b/third_party/python/yarl/CHANGES.rst deleted file mode 100644 index 58ee2e4828aa..000000000000 --- a/third_party/python/yarl/CHANGES.rst +++ /dev/null @@ -1,572 +0,0 @@ -========= -Changelog -========= - -.. - You should *NOT* be adding new change log entries to this file, this - file is managed by towncrier. You *may* edit previous change logs to - fix problems like typo corrections or such. - To add a new change log entry, please see - https://pip.pypa.io/en/latest/development/#adding-a-news-entry - we named the news folder "changes". - - WARNING: Don't drop the next directive! - -.. towncrier release notes start - -1.6.3 (2020-11-14) -================== - -Bugfixes --------- - -- No longer loose characters when decoding incorrect percent-sequences (like ``%e2%82%f8``). All non-decodable percent-sequences are now preserved. - `#517 `_ -- Provide x86 Windows wheels. - `#535 `_ - - ----- - - -1.6.2 (2020-10-12) -================== - - -Bugfixes --------- - -- Provide generated ``.c`` files in TarBall distribution. - `#530 `_ - -1.6.1 (2020-10-12) -================== - -Features --------- - -- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on - Linux as well as ``x86_64``. - `#507 `_ -- Provide wheels for Python 3.9. - `#526 `_ - -Bugfixes --------- - -- ``human_repr()`` now always produces valid representation equivalent to the original URL (if the original URL is valid). - `#511 `_ -- Fixed requoting a single percent followed by a percent-encoded character in the Cython implementation. - `#514 `_ -- Fix ValueError when decoding ``%`` which is not followed by two hexadecimal digits. - `#516 `_ -- Fix decoding ``%`` followed by a space and hexadecimal digit. 
- `#520 `_ -- Fix annotation of ``with_query()``/``update_query()`` methods for ``key=[val1, val2]`` case. - `#528 `_ - -Removal -------- - -- Drop Python 3.5 support; Python 3.6 is the minimal supported Python version. - - ----- - - -1.6.0 (2020-09-23) -================== - -Features --------- - -- Allow for int and float subclasses in query, while still denying bool. - `#492 `_ - - -Bugfixes --------- - -- Do not requote arguments in ``URL.build()``, ``with_xxx()`` and in ``/`` operator. - `#502 `_ -- Keep IPv6 brackets in ``origin()``. - `#504 `_ - - ----- - - -1.5.1 (2020-08-01) -================== - -Bugfixes --------- - -- Fix including relocated internal ``yarl._quoting_c`` C-extension into published PyPI dists. - `#485 `_ - - -Misc ----- - -- `#484 `_ - - ----- - - -1.5.0 (2020-07-26) -================== - -Features --------- - -- Convert host to lowercase on URL building. - `#386 `_ -- Allow using ``mod`` operator (`%`) for updating query string (an alias for ``update_query()`` method). - `#435 `_ -- Allow use of sequences such as ``list`` and ``tuple`` in the values - of a mapping such as ``dict`` to represent that a key has many values:: - - url = URL("http://example.com") - assert url.with_query({"a": [1, 2]}) == URL("http://example.com/?a=1&a=2") - - `#443 `_ -- Support URL.build() with scheme and path (creates a relative URL). - `#464 `_ -- Cache slow IDNA encode/decode calls. - `#476 `_ -- Add ``@final`` / ``Final`` type hints - `#477 `_ -- Support URL authority/raw_authority properties and authority argument of ``URL.build()`` method. - `#478 `_ -- Hide the library implementation details, make the exposed public list very clean. - `#483 `_ - - -Bugfixes --------- - -- Fix tests with newer Python (3.7.6, 3.8.1 and 3.9.0+). - `#409 `_ -- Fix a bug where query component, passed in a form of mapping or sequence, is unquoted in unexpected way. - `#426 `_ -- Hide `Query` and `QueryVariable` type aliases in `__init__.pyi`, now they are prefixed with underscore. - `#431 `_ -- Keep ipv6 brackets after updating port/user/password. - `#451 `_ - - ----- - - -1.4.2 (2019-12-05) -================== - -Features --------- - -- Workaround for missing `str.isascii()` in Python 3.6 - `#389 `_ - - ----- - - -1.4.1 (2019-11-29) -================== - -* Fix regression, make the library work on Python 3.5 and 3.6 again. 
- -1.4.0 (2019-11-29) -================== - -* Distinguish an empty password in URL from a password not provided at all (#262) - -* Fixed annotations for optional parameters of ``URL.build`` (#309) - -* Use None as default value of ``user`` parameter of ``URL.build`` (#309) - -* Enforce building C Accelerated modules when installing from source tarball, use - ``YARL_NO_EXTENSIONS`` environment variable for falling back to (slower) Pure Python - implementation (#329) - -* Drop Python 3.5 support - -* Fix quoting of plus in path by pure python version (#339) - -* Don't create a new URL if fragment is unchanged (#292) - -* Included in error msg the path that produces starting slash forbidden error (#376) - -* Skip slow IDNA encoding for ASCII-only strings (#387) - - -1.3.0 (2018-12-11) -================== - -* Fix annotations for ``query`` parameter (#207) - -* An incoming query sequence can have int variables (the same as for - Mapping type) (#208) - -* Add ``URL.explicit_port`` property (#218) - -* Give a friendlier error when port cant be converted to int (#168) - -* ``bool(URL())`` now returns ``False`` (#272) - -1.2.6 (2018-06-14) -================== - -* Drop Python 3.4 trove classifier (#205) - -1.2.5 (2018-05-23) -================== - -* Fix annotations for ``build`` (#199) - -1.2.4 (2018-05-08) -================== - -* Fix annotations for ``cached_property`` (#195) - -1.2.3 (2018-05-03) -================== - -* Accept ``str`` subclasses in ``URL`` constructor (#190) - -1.2.2 (2018-05-01) -================== - -* Fix build - -1.2.1 (2018-04-30) -================== - -* Pin minimal required Python to 3.5.3 (#189) - -1.2.0 (2018-04-30) -================== - -* Forbid inheritance, replace ``__init__`` with ``__new__`` (#171) - -* Support PEP-561 (provide type hinting marker) (#182) - -1.1.1 (2018-02-17) -================== - -* Fix performance regression: don't encode enmpty netloc (#170) - -1.1.0 (2018-01-21) -================== - -* Make pure Python quoter consistent with Cython version (#162) - -1.0.0 (2018-01-15) -================== - -* Use fast path if quoted string does not need requoting (#154) - -* Speed up quoting/unquoting by ``_Quoter`` and ``_Unquoter`` classes (#155) - -* Drop ``yarl.quote`` and ``yarl.unquote`` public functions (#155) - -* Add custom string writer, reuse static buffer if available (#157) - Code is 50-80 times faster than Pure Python version (was 4-5 times faster) - -* Don't recode IP zone (#144) - -* Support ``encoded=True`` in ``yarl.URL.build()`` (#158) - -* Fix updating query with multiple keys (#160) - -0.18.0 (2018-01-10) -=================== - -* Fallback to IDNA 2003 if domain name is not IDNA 2008 compatible (#152) - -0.17.0 (2017-12-30) -=================== - -* Use IDNA 2008 for domain name processing (#149) - -0.16.0 (2017-12-07) -=================== - -* Fix raising ``TypeError`` by ``url.query_string()`` after - ``url.with_query({})`` (empty mapping) (#141) - -0.15.0 (2017-11-23) -=================== - -* Add ``raw_path_qs`` attribute (#137) - -0.14.2 (2017-11-14) -=================== - -* Restore ``strict`` parameter as no-op in ``quote`` / ``unquote`` - -0.14.1 (2017-11-13) -=================== - -* Restore ``strict`` parameter as no-op for sake of compatibility with - aiohttp 2.2 - -0.14.0 (2017-11-11) -=================== - -* Drop strict mode (#123) - -* Fix ``"ValueError: Unallowed PCT %"`` when there's a ``"%"`` in the url (#124) - -0.13.0 (2017-10-01) -=================== - -* Document ``encoded`` parameter (#102) - -* Support relative 
urls like ``'?key=value'`` (#100) - -* Unsafe encoding for QS fixed. Encode ``;`` char in value param (#104) - -* Process passwords without user names (#95) - -0.12.0 (2017-06-26) -=================== - -* Properly support paths without leading slash in ``URL.with_path()`` (#90) - -* Enable type annotation checks - -0.11.0 (2017-06-26) -=================== - -* Normalize path (#86) - -* Clear query and fragment parts in ``.with_path()`` (#85) - -0.10.3 (2017-06-13) -=================== - -* Prevent double URL args unquoting (#83) - -0.10.2 (2017-05-05) -=================== - -* Unexpected hash behaviour (#75) - - -0.10.1 (2017-05-03) -=================== - -* Unexpected compare behaviour (#73) - -* Do not quote or unquote + if not a query string. (#74) - - -0.10.0 (2017-03-14) -=================== - -* Added ``URL.build`` class method (#58) - -* Added ``path_qs`` attribute (#42) - - -0.9.8 (2017-02-16) -================== - -* Do not quote ``:`` in path - - -0.9.7 (2017-02-16) -================== - -* Load from pickle without _cache (#56) - -* Percent-encoded pluses in path variables become spaces (#59) - - -0.9.6 (2017-02-15) -================== - -* Revert backward incompatible change (BaseURL) - - -0.9.5 (2017-02-14) -================== - -* Fix BaseURL rich comparison support - - -0.9.4 (2017-02-14) -================== - -* Use BaseURL - - -0.9.3 (2017-02-14) -================== - -* Added BaseURL - - -0.9.2 (2017-02-08) -================== - -* Remove debug print - - -0.9.1 (2017-02-07) -================== - -* Do not lose tail chars (#45) - - -0.9.0 (2017-02-07) -================== - -* Allow to quote ``%`` in non strict mode (#21) - -* Incorrect parsing of query parameters with %3B (;) inside (#34) - -* Fix core dumps (#41) - -* tmpbuf - compiling error (#43) - -* Added ``URL.update_path()`` method - -* Added ``URL.update_query()`` method (#47) - - -0.8.1 (2016-12-03) -================== - -* Fix broken aiohttp: revert back ``quote`` / ``unquote``. 
- - -0.8.0 (2016-12-03) -================== - -* Support more verbose error messages in ``.with_query()`` (#24) - -* Don't percent-encode ``@`` and ``:`` in path (#32) - -* Don't expose ``yarl.quote`` and ``yarl.unquote``, these functions are - part of private API - -0.7.1 (2016-11-18) -================== - -* Accept not only ``str`` but all classes inherited from ``str`` also (#25) - -0.7.0 (2016-11-07) -================== - -* Accept ``int`` as value for ``.with_query()`` - -0.6.0 (2016-11-07) -================== - -* Explicitly use UTF8 encoding in setup.py (#20) -* Properly unquote non-UTF8 strings (#19) - -0.5.3 (2016-11-02) -================== - -* Don't use namedtuple fields but indexes on URL construction - -0.5.2 (2016-11-02) -================== - -* Inline ``_encode`` class method - -0.5.1 (2016-11-02) -================== - -* Make URL construction faster by removing extra classmethod calls - -0.5.0 (2016-11-02) -================== - -* Add cython optimization for quoting/unquoting -* Provide binary wheels - -0.4.3 (2016-09-29) -================== - -* Fix typing stubs - -0.4.2 (2016-09-29) -================== - -* Expose ``quote()`` and ``unquote()`` as public API - -0.4.1 (2016-09-28) -================== - -* Support empty values in query (``'/path?arg'``) - -0.4.0 (2016-09-27) -================== - -* Introduce ``relative()`` (#16) - -0.3.2 (2016-09-27) -================== - -* Typo fixes #15 - -0.3.1 (2016-09-26) -================== - -* Support sequence of pairs as ``with_query()`` parameter - -0.3.0 (2016-09-26) -================== - -* Introduce ``is_default_port()`` - -0.2.1 (2016-09-26) -================== - -* Raise ValueError for URLs like 'http://:8080/' - -0.2.0 (2016-09-18) -================== - -* Avoid doubling slashes when joining paths (#13) - -* Appending path starting from slash is forbidden (#12) - -0.1.4 (2016-09-09) -================== - -* Add kwargs support for ``with_query()`` (#10) - -0.1.3 (2016-09-07) -================== - -* Document ``with_query()``, ``with_fragment()`` and ``origin()`` - -* Allow ``None`` for ``with_query()`` and ``with_fragment()`` - -0.1.2 (2016-09-07) -================== - -* Fix links, tune docs theme. - -0.1.1 (2016-09-06) -================== - -* Update README, old version used obsolete API - -0.1.0 (2016-09-06) -================== - -* The library was deeply refactored, bytes are gone away but all - accepted strings are encoded if needed. - -0.0.1 (2016-08-30) -================== - -* The first release. diff --git a/third_party/python/yarl/LICENSE b/third_party/python/yarl/LICENSE deleted file mode 100644 index cc5cfd67902f..000000000000 --- a/third_party/python/yarl/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2016-2018, Andrew Svetlov and aio-libs team - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/third_party/python/yarl/MANIFEST.in b/third_party/python/yarl/MANIFEST.in deleted file mode 100644 index dab6cb9a0046..000000000000 --- a/third_party/python/yarl/MANIFEST.in +++ /dev/null @@ -1,13 +0,0 @@ -include LICENSE -include CHANGES.rst -include README.rst -graft yarl -graft docs -graft tests -include yarl/*.c -global-exclude *.pyc -global-exclude *.cache -exclude yarl/*.html -exclude yarl/*.so -exclude yarl/*.pyd -prune docs/_build diff --git a/third_party/python/yarl/PKG-INFO b/third_party/python/yarl/PKG-INFO deleted file mode 100644 index 3c242e513537..000000000000 --- a/third_party/python/yarl/PKG-INFO +++ /dev/null @@ -1,797 +0,0 @@ -Metadata-Version: 2.1 -Name: yarl -Version: 1.6.3 -Summary: Yet another URL library -Home-page: https://github.com/aio-libs/yarl/ -Author: Andrew Svetlov -Author-email: andrew.svetlov@gmail.com -License: Apache 2 -Description: yarl - ==== - - .. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg - :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI - :align: right - - .. image:: https://codecov.io/gh/aio-libs/yarl/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/yarl - - .. image:: https://badge.fury.io/py/yarl.svg - :target: https://badge.fury.io/py/yarl - - - .. image:: https://readthedocs.org/projects/yarl/badge/?version=latest - :target: https://yarl.readthedocs.io - - - .. image:: https://img.shields.io/pypi/pyversions/yarl.svg - :target: https://pypi.python.org/pypi/yarl - - .. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - - Introduction - ------------ - - Url is constructed from ``str``: - - .. code-block:: pycon - - >>> from yarl import URL - >>> url = URL('https://www.python.org/~guido?arg=1#frag') - >>> url - URL('https://www.python.org/~guido?arg=1#frag') - - All url parts: *scheme*, *user*, *password*, *host*, *port*, *path*, - *query* and *fragment* are accessible by properties: - - .. code-block:: pycon - - >>> url.scheme - 'https' - >>> url.host - 'www.python.org' - >>> url.path - '/~guido' - >>> url.query_string - 'arg=1' - >>> url.query - - >>> url.fragment - 'frag' - - All url manipulations produce a new url object: - - .. code-block:: pycon - - >>> url = URL('https://www.python.org') - >>> url / 'foo' / 'bar' - URL('https://www.python.org/foo/bar') - >>> url / 'foo' % {'bar': 'baz'} - URL('https://www.python.org/foo?bar=baz') - - Strings passed to constructor and modification methods are - automatically encoded giving canonical representation as result: - - .. code-block:: pycon - - >>> url = URL('https://www.python.org/путь') - >>> url - URL('https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C') - - Regular properties are *percent-decoded*, use ``raw_`` versions for - getting *encoded* strings: - - .. code-block:: pycon - - >>> url.path - '/путь' - - >>> url.raw_path - '/%D0%BF%D1%83%D1%82%D1%8C' - - Human readable representation of URL is available as ``.human_repr()``: - - .. code-block:: pycon - - >>> url.human_repr() - 'https://www.python.org/путь' - - For full documentation please read https://yarl.readthedocs.org. - - - Installation - ------------ - - :: - - $ pip install yarl - - The library is Python 3 only! - - PyPI contains binary wheels for Linux, Windows and MacOS. 
If you want to install - ``yarl`` on another operating system (like *Alpine Linux*, which is not - manylinux-compliant because of the missing glibc and therefore, cannot be - used with our wheels) the the tarball will be used to compile the library from - the source code. It requires a C compiler and and Python headers installed. - - To skip the compilation you must explicitly opt-in by setting the `YARL_NO_EXTENSIONS` - environment variable to a non-empty value, e.g.: - - .. code-block:: bash - - $ YARL_NO_EXTENSIONS=1 pip install yarl - - Please note that the pure-Python (uncompiled) version is much slower. However, - PyPy always uses a pure-Python implementation, and, as such, it is unaffected - by this variable. - - Dependencies - ------------ - - YARL requires multidict_ library. - - - API documentation - ------------------ - - The documentation is located at https://yarl.readthedocs.org - - - Why isn't boolean supported by the URL query API? - ------------------------------------------------- - - There is no standard for boolean representation of boolean values. - - Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``, - ``Y``/``N``, ``1``/``0``, etc. - - ``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because - it is specific to how the end-user's application is built and would be different for - different apps. The library doesn't accept booleans in the API; a user should convert - bools into strings using own preferred translation protocol. - - - Comparison with other URL libraries - ------------------------------------ - - * furl (https://pypi.python.org/pypi/furl) - - The library has rich functionality but the ``furl`` object is mutable. - - I'm afraid to pass this object into foreign code: who knows if the - code will modify my url in a terrible way while I just want to send URL - with handy helpers for accessing URL properties. - - ``furl`` has other non-obvious tricky things but the main objection - is mutability. - - * URLObject (https://pypi.python.org/pypi/URLObject) - - URLObject is immutable, that's pretty good. - - Every URL change generates a new URL object. - - But the library doesn't do any decode/encode transformations leaving the - end user to cope with these gory details. - - - Source code - ----------- - - The project is hosted on GitHub_ - - Please file an issue on the `bug tracker - `_ if you have found a bug - or have some suggestion in order to improve the library. - - The library uses `Azure Pipelines `_ for - Continuous Integration. - - Discussion list - --------------- - - *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs - - Feel free to post your questions and ideas here. - - - Authors and License - ------------------- - - The ``yarl`` package is written by Andrew Svetlov. - - It's *Apache 2* licensed and freely available. - - - .. _GitHub: https://github.com/aio-libs/yarl - - .. _multidict: https://github.com/aio-libs/multidict - - - ========= - Changelog - ========= - - .. - You should *NOT* be adding new change log entries to this file, this - file is managed by towncrier. You *may* edit previous change logs to - fix problems like typo corrections or such. - To add a new change log entry, please see - https://pip.pypa.io/en/latest/development/#adding-a-news-entry - we named the news folder "changes". - - WARNING: Don't drop the next directive! - - .. 
towncrier release notes start - - 1.6.3 (2020-11-14) - ================== - - Bugfixes - -------- - - - No longer loose characters when decoding incorrect percent-sequences (like ``%e2%82%f8``). All non-decodable percent-sequences are now preserved. - `#517 `_ - - Provide x86 Windows wheels. - `#535 `_ - - - ---- - - - 1.6.2 (2020-10-12) - ================== - - - Bugfixes - -------- - - - Provide generated ``.c`` files in TarBall distribution. - `#530 `_ - - 1.6.1 (2020-10-12) - ================== - - Features - -------- - - - Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on - Linux as well as ``x86_64``. - `#507 `_ - - Provide wheels for Python 3.9. - `#526 `_ - - Bugfixes - -------- - - - ``human_repr()`` now always produces valid representation equivalent to the original URL (if the original URL is valid). - `#511 `_ - - Fixed requoting a single percent followed by a percent-encoded character in the Cython implementation. - `#514 `_ - - Fix ValueError when decoding ``%`` which is not followed by two hexadecimal digits. - `#516 `_ - - Fix decoding ``%`` followed by a space and hexadecimal digit. - `#520 `_ - - Fix annotation of ``with_query()``/``update_query()`` methods for ``key=[val1, val2]`` case. - `#528 `_ - - Removal - ------- - - - Drop Python 3.5 support; Python 3.6 is the minimal supported Python version. - - - ---- - - - 1.6.0 (2020-09-23) - ================== - - Features - -------- - - - Allow for int and float subclasses in query, while still denying bool. - `#492 `_ - - - Bugfixes - -------- - - - Do not requote arguments in ``URL.build()``, ``with_xxx()`` and in ``/`` operator. - `#502 `_ - - Keep IPv6 brackets in ``origin()``. - `#504 `_ - - - ---- - - - 1.5.1 (2020-08-01) - ================== - - Bugfixes - -------- - - - Fix including relocated internal ``yarl._quoting_c`` C-extension into published PyPI dists. - `#485 `_ - - - Misc - ---- - - - `#484 `_ - - - ---- - - - 1.5.0 (2020-07-26) - ================== - - Features - -------- - - - Convert host to lowercase on URL building. - `#386 `_ - - Allow using ``mod`` operator (`%`) for updating query string (an alias for ``update_query()`` method). - `#435 `_ - - Allow use of sequences such as ``list`` and ``tuple`` in the values - of a mapping such as ``dict`` to represent that a key has many values:: - - url = URL("http://example.com") - assert url.with_query({"a": [1, 2]}) == URL("http://example.com/?a=1&a=2") - - `#443 `_ - - Support URL.build() with scheme and path (creates a relative URL). - `#464 `_ - - Cache slow IDNA encode/decode calls. - `#476 `_ - - Add ``@final`` / ``Final`` type hints - `#477 `_ - - Support URL authority/raw_authority properties and authority argument of ``URL.build()`` method. - `#478 `_ - - Hide the library implementation details, make the exposed public list very clean. - `#483 `_ - - - Bugfixes - -------- - - - Fix tests with newer Python (3.7.6, 3.8.1 and 3.9.0+). - `#409 `_ - - Fix a bug where query component, passed in a form of mapping or sequence, is unquoted in unexpected way. - `#426 `_ - - Hide `Query` and `QueryVariable` type aliases in `__init__.pyi`, now they are prefixed with underscore. - `#431 `_ - - Keep ipv6 brackets after updating port/user/password. 
- `#451 `_ - - - ---- - - - 1.4.2 (2019-12-05) - ================== - - Features - -------- - - - Workaround for missing `str.isascii()` in Python 3.6 - `#389 `_ - - - ---- - - - 1.4.1 (2019-11-29) - ================== - - * Fix regression, make the library work on Python 3.5 and 3.6 again. - - 1.4.0 (2019-11-29) - ================== - - * Distinguish an empty password in URL from a password not provided at all (#262) - - * Fixed annotations for optional parameters of ``URL.build`` (#309) - - * Use None as default value of ``user`` parameter of ``URL.build`` (#309) - - * Enforce building C Accelerated modules when installing from source tarball, use - ``YARL_NO_EXTENSIONS`` environment variable for falling back to (slower) Pure Python - implementation (#329) - - * Drop Python 3.5 support - - * Fix quoting of plus in path by pure python version (#339) - - * Don't create a new URL if fragment is unchanged (#292) - - * Included in error msg the path that produces starting slash forbidden error (#376) - - * Skip slow IDNA encoding for ASCII-only strings (#387) - - - 1.3.0 (2018-12-11) - ================== - - * Fix annotations for ``query`` parameter (#207) - - * An incoming query sequence can have int variables (the same as for - Mapping type) (#208) - - * Add ``URL.explicit_port`` property (#218) - - * Give a friendlier error when port cant be converted to int (#168) - - * ``bool(URL())`` now returns ``False`` (#272) - - 1.2.6 (2018-06-14) - ================== - - * Drop Python 3.4 trove classifier (#205) - - 1.2.5 (2018-05-23) - ================== - - * Fix annotations for ``build`` (#199) - - 1.2.4 (2018-05-08) - ================== - - * Fix annotations for ``cached_property`` (#195) - - 1.2.3 (2018-05-03) - ================== - - * Accept ``str`` subclasses in ``URL`` constructor (#190) - - 1.2.2 (2018-05-01) - ================== - - * Fix build - - 1.2.1 (2018-04-30) - ================== - - * Pin minimal required Python to 3.5.3 (#189) - - 1.2.0 (2018-04-30) - ================== - - * Forbid inheritance, replace ``__init__`` with ``__new__`` (#171) - - * Support PEP-561 (provide type hinting marker) (#182) - - 1.1.1 (2018-02-17) - ================== - - * Fix performance regression: don't encode enmpty netloc (#170) - - 1.1.0 (2018-01-21) - ================== - - * Make pure Python quoter consistent with Cython version (#162) - - 1.0.0 (2018-01-15) - ================== - - * Use fast path if quoted string does not need requoting (#154) - - * Speed up quoting/unquoting by ``_Quoter`` and ``_Unquoter`` classes (#155) - - * Drop ``yarl.quote`` and ``yarl.unquote`` public functions (#155) - - * Add custom string writer, reuse static buffer if available (#157) - Code is 50-80 times faster than Pure Python version (was 4-5 times faster) - - * Don't recode IP zone (#144) - - * Support ``encoded=True`` in ``yarl.URL.build()`` (#158) - - * Fix updating query with multiple keys (#160) - - 0.18.0 (2018-01-10) - =================== - - * Fallback to IDNA 2003 if domain name is not IDNA 2008 compatible (#152) - - 0.17.0 (2017-12-30) - =================== - - * Use IDNA 2008 for domain name processing (#149) - - 0.16.0 (2017-12-07) - =================== - - * Fix raising ``TypeError`` by ``url.query_string()`` after - ``url.with_query({})`` (empty mapping) (#141) - - 0.15.0 (2017-11-23) - =================== - - * Add ``raw_path_qs`` attribute (#137) - - 0.14.2 (2017-11-14) - =================== - - * Restore ``strict`` parameter as no-op in ``quote`` / ``unquote`` - - 0.14.1 (2017-11-13) - 
=================== - - * Restore ``strict`` parameter as no-op for sake of compatibility with - aiohttp 2.2 - - 0.14.0 (2017-11-11) - =================== - - * Drop strict mode (#123) - - * Fix ``"ValueError: Unallowed PCT %"`` when there's a ``"%"`` in the url (#124) - - 0.13.0 (2017-10-01) - =================== - - * Document ``encoded`` parameter (#102) - - * Support relative urls like ``'?key=value'`` (#100) - - * Unsafe encoding for QS fixed. Encode ``;`` char in value param (#104) - - * Process passwords without user names (#95) - - 0.12.0 (2017-06-26) - =================== - - * Properly support paths without leading slash in ``URL.with_path()`` (#90) - - * Enable type annotation checks - - 0.11.0 (2017-06-26) - =================== - - * Normalize path (#86) - - * Clear query and fragment parts in ``.with_path()`` (#85) - - 0.10.3 (2017-06-13) - =================== - - * Prevent double URL args unquoting (#83) - - 0.10.2 (2017-05-05) - =================== - - * Unexpected hash behaviour (#75) - - - 0.10.1 (2017-05-03) - =================== - - * Unexpected compare behaviour (#73) - - * Do not quote or unquote + if not a query string. (#74) - - - 0.10.0 (2017-03-14) - =================== - - * Added ``URL.build`` class method (#58) - - * Added ``path_qs`` attribute (#42) - - - 0.9.8 (2017-02-16) - ================== - - * Do not quote ``:`` in path - - - 0.9.7 (2017-02-16) - ================== - - * Load from pickle without _cache (#56) - - * Percent-encoded pluses in path variables become spaces (#59) - - - 0.9.6 (2017-02-15) - ================== - - * Revert backward incompatible change (BaseURL) - - - 0.9.5 (2017-02-14) - ================== - - * Fix BaseURL rich comparison support - - - 0.9.4 (2017-02-14) - ================== - - * Use BaseURL - - - 0.9.3 (2017-02-14) - ================== - - * Added BaseURL - - - 0.9.2 (2017-02-08) - ================== - - * Remove debug print - - - 0.9.1 (2017-02-07) - ================== - - * Do not lose tail chars (#45) - - - 0.9.0 (2017-02-07) - ================== - - * Allow to quote ``%`` in non strict mode (#21) - - * Incorrect parsing of query parameters with %3B (;) inside (#34) - - * Fix core dumps (#41) - - * tmpbuf - compiling error (#43) - - * Added ``URL.update_path()`` method - - * Added ``URL.update_query()`` method (#47) - - - 0.8.1 (2016-12-03) - ================== - - * Fix broken aiohttp: revert back ``quote`` / ``unquote``. 
- - - 0.8.0 (2016-12-03) - ================== - - * Support more verbose error messages in ``.with_query()`` (#24) - - * Don't percent-encode ``@`` and ``:`` in path (#32) - - * Don't expose ``yarl.quote`` and ``yarl.unquote``, these functions are - part of private API - - 0.7.1 (2016-11-18) - ================== - - * Accept not only ``str`` but all classes inherited from ``str`` also (#25) - - 0.7.0 (2016-11-07) - ================== - - * Accept ``int`` as value for ``.with_query()`` - - 0.6.0 (2016-11-07) - ================== - - * Explicitly use UTF8 encoding in setup.py (#20) - * Properly unquote non-UTF8 strings (#19) - - 0.5.3 (2016-11-02) - ================== - - * Don't use namedtuple fields but indexes on URL construction - - 0.5.2 (2016-11-02) - ================== - - * Inline ``_encode`` class method - - 0.5.1 (2016-11-02) - ================== - - * Make URL construction faster by removing extra classmethod calls - - 0.5.0 (2016-11-02) - ================== - - * Add cython optimization for quoting/unquoting - * Provide binary wheels - - 0.4.3 (2016-09-29) - ================== - - * Fix typing stubs - - 0.4.2 (2016-09-29) - ================== - - * Expose ``quote()`` and ``unquote()`` as public API - - 0.4.1 (2016-09-28) - ================== - - * Support empty values in query (``'/path?arg'``) - - 0.4.0 (2016-09-27) - ================== - - * Introduce ``relative()`` (#16) - - 0.3.2 (2016-09-27) - ================== - - * Typo fixes #15 - - 0.3.1 (2016-09-26) - ================== - - * Support sequence of pairs as ``with_query()`` parameter - - 0.3.0 (2016-09-26) - ================== - - * Introduce ``is_default_port()`` - - 0.2.1 (2016-09-26) - ================== - - * Raise ValueError for URLs like 'http://:8080/' - - 0.2.0 (2016-09-18) - ================== - - * Avoid doubling slashes when joining paths (#13) - - * Appending path starting from slash is forbidden (#12) - - 0.1.4 (2016-09-09) - ================== - - * Add kwargs support for ``with_query()`` (#10) - - 0.1.3 (2016-09-07) - ================== - - * Document ``with_query()``, ``with_fragment()`` and ``origin()`` - - * Allow ``None`` for ``with_query()`` and ``with_fragment()`` - - 0.1.2 (2016-09-07) - ================== - - * Fix links, tune docs theme. - - 0.1.1 (2016-09-06) - ================== - - * Update README, old version used obsolete API - - 0.1.0 (2016-09-06) - ================== - - * The library was deeply refactored, bytes are gone away but all - accepted strings are encoded if needed. - - 0.0.1 (2016-08-30) - ================== - - * The first release. - -Platform: UNKNOWN -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst diff --git a/third_party/python/yarl/README.rst b/third_party/python/yarl/README.rst deleted file mode 100644 index 6347ece2b975..000000000000 --- a/third_party/python/yarl/README.rst +++ /dev/null @@ -1,202 +0,0 @@ -yarl -==== - -.. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg - :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI - :align: right - -.. 
image:: https://codecov.io/gh/aio-libs/yarl/branch/master/graph/badge.svg
-   :target: https://codecov.io/gh/aio-libs/yarl
-
-.. image:: https://badge.fury.io/py/yarl.svg
-   :target: https://badge.fury.io/py/yarl
-
-
-.. image:: https://readthedocs.org/projects/yarl/badge/?version=latest
-   :target: https://yarl.readthedocs.io
-
-
-.. image:: https://img.shields.io/pypi/pyversions/yarl.svg
-   :target: https://pypi.python.org/pypi/yarl
-
-.. image:: https://badges.gitter.im/Join%20Chat.svg
-   :target: https://gitter.im/aio-libs/Lobby
-   :alt: Chat on Gitter
-
-Introduction
--------------
-
-A URL is constructed from a ``str``:
-
-.. code-block:: pycon
-
-   >>> from yarl import URL
-   >>> url = URL('https://www.python.org/~guido?arg=1#frag')
-   >>> url
-   URL('https://www.python.org/~guido?arg=1#frag')
-
-All URL parts: *scheme*, *user*, *password*, *host*, *port*, *path*,
-*query* and *fragment* are accessible by properties:
-
-.. code-block:: pycon
-
-   >>> url.scheme
-   'https'
-   >>> url.host
-   'www.python.org'
-   >>> url.path
-   '/~guido'
-   >>> url.query_string
-   'arg=1'
-   >>> url.query
-
-   >>> url.fragment
-   'frag'
-
-All URL manipulations produce a new URL object:
-
-.. code-block:: pycon
-
-   >>> url = URL('https://www.python.org')
-   >>> url / 'foo' / 'bar'
-   URL('https://www.python.org/foo/bar')
-   >>> url / 'foo' % {'bar': 'baz'}
-   URL('https://www.python.org/foo?bar=baz')
-
-Strings passed to the constructor and to modification methods are
-automatically encoded, giving a canonical representation as the result:
-
-.. code-block:: pycon
-
-   >>> url = URL('https://www.python.org/путь')
-   >>> url
-   URL('https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C')
-
-Regular properties are *percent-decoded*; use the ``raw_`` versions to
-get the *encoded* strings:
-
-.. code-block:: pycon
-
-   >>> url.path
-   '/путь'
-
-   >>> url.raw_path
-   '/%D0%BF%D1%83%D1%82%D1%8C'
-
-A human-readable representation of the URL is available as ``.human_repr()``:
-
-.. code-block:: pycon
-
-   >>> url.human_repr()
-   'https://www.python.org/путь'
-
-For full documentation, please read https://yarl.readthedocs.org.
-
-
-Installation
--------------
-
-::
-
-   $ pip install yarl
-
-The library is Python 3 only!
-
-PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
-``yarl`` on another operating system (like *Alpine Linux*, which is not
-manylinux-compliant because it lacks glibc and therefore cannot be
-used with our wheels), the tarball will be used to compile the library from
-the source code. This requires a C compiler and Python headers to be installed.
-
-To skip the compilation, you must explicitly opt in by setting the `YARL_NO_EXTENSIONS`
-environment variable to a non-empty value, e.g.:
-
-.. code-block:: bash
-
-   $ YARL_NO_EXTENSIONS=1 pip install yarl
-
-Please note that the pure-Python (uncompiled) version is much slower. However,
-PyPy always uses a pure-Python implementation and, as such, is unaffected
-by this variable.
-
-Dependencies
--------------
-
-YARL requires the multidict_ library.
-
-
-API documentation
-------------------
-
-The documentation is located at https://yarl.readthedocs.org
-
-
-Why isn't boolean supported by the URL query API?
--------------------------------------------------
-
-There is no standard for representing boolean values in a query string.
-
-Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``,
-``Y``/``N``, ``1``/``0``, etc.
- -``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because -it is specific to how the end-user's application is built and would be different for -different apps. The library doesn't accept booleans in the API; a user should convert -bools into strings using own preferred translation protocol. - - -Comparison with other URL libraries ------------------------------------- - -* furl (https://pypi.python.org/pypi/furl) - - The library has rich functionality but the ``furl`` object is mutable. - - I'm afraid to pass this object into foreign code: who knows if the - code will modify my url in a terrible way while I just want to send URL - with handy helpers for accessing URL properties. - - ``furl`` has other non-obvious tricky things but the main objection - is mutability. - -* URLObject (https://pypi.python.org/pypi/URLObject) - - URLObject is immutable, that's pretty good. - - Every URL change generates a new URL object. - - But the library doesn't do any decode/encode transformations leaving the - end user to cope with these gory details. - - -Source code ------------ - -The project is hosted on GitHub_ - -Please file an issue on the `bug tracker -`_ if you have found a bug -or have some suggestion in order to improve the library. - -The library uses `Azure Pipelines `_ for -Continuous Integration. - -Discussion list ---------------- - -*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs - -Feel free to post your questions and ideas here. - - -Authors and License -------------------- - -The ``yarl`` package is written by Andrew Svetlov. - -It's *Apache 2* licensed and freely available. - - -.. _GitHub: https://github.com/aio-libs/yarl - -.. _multidict: https://github.com/aio-libs/multidict diff --git a/third_party/python/yarl/pyproject.toml b/third_party/python/yarl/pyproject.toml deleted file mode 100644 index 3cd69a29d51c..000000000000 --- a/third_party/python/yarl/pyproject.toml +++ /dev/null @@ -1,7 +0,0 @@ -[tool.towncrier] -package = "yarl" -filename = "CHANGES.rst" -directory = "CHANGES/" -title_format = "{version} ({project_date})" -template = "CHANGES/.TEMPLATE.rst" -issue_format = "`#{issue} `_" diff --git a/third_party/python/yarl/setup.cfg b/third_party/python/yarl/setup.cfg deleted file mode 100644 index 7515097649e0..000000000000 --- a/third_party/python/yarl/setup.cfg +++ /dev/null @@ -1,27 +0,0 @@ -[metadata] -license_file = LICENSE - -[tool:pytest] -addopts = --cov=yarl -v -filterwarnings = error -norecursedirs = dist docs build .tox .eggs venv virtualenv .git -minversion = 3.8.2 -testpaths = tests/ -junit_suite_name = yarl_test_suite - -[flake8] -ignore = E203,E301,E302,E704,W503,W504,F811 -max-line-length = 88 - -[mypy] - -[mypy-idna] -ignore_missing_imports = true - -[mypy-pytest] -ignore_missing_imports = true - -[egg_info] -tag_build = -tag_date = 0 - diff --git a/third_party/python/yarl/setup.py b/third_party/python/yarl/setup.py deleted file mode 100644 index d47cabcb9dc2..000000000000 --- a/third_party/python/yarl/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -import pathlib -import os -import sys -import re - -from setuptools import setup, Extension - - -if sys.version_info < (3, 5): - raise RuntimeError("yarl 1.4+ requires Python 3.5+") - - -NO_EXTENSIONS = bool(os.environ.get("YARL_NO_EXTENSIONS")) # type: bool - -if sys.implementation.name != "cpython": - NO_EXTENSIONS = True - - -extensions = [Extension("yarl._quoting_c", ["yarl/_quoting_c.c"])] -# extra_compile_args=["-g"], -# extra_link_args=["-g"], - - -here = 
pathlib.Path(__file__).parent -fname = here / "yarl" / "__init__.py" - -with fname.open(encoding="utf8") as fp: - try: - version = re.findall(r'^__version__ = "([^"]+)"$', fp.read(), re.M)[0] - except IndexError: - raise RuntimeError("Unable to determine version.") - -install_requires = [ - "multidict>=4.0", - "idna>=2.0", - 'typing_extensions>=3.7.4;python_version<"3.8"', -] - - -def read(name): - fname = here / name - with fname.open(encoding="utf8") as f: - return f.read() - - -args = dict( - name="yarl", - version=version, - description=("Yet another URL library"), - long_description="\n\n".join([read("README.rst"), read("CHANGES.rst")]), - long_description_content_type="text/x-rst", - classifiers=[ - "License :: OSI Approved :: Apache Software License", - "Intended Audience :: Developers", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Topic :: Internet :: WWW/HTTP", - ], - author="Andrew Svetlov", - author_email="andrew.svetlov@gmail.com", - url="https://github.com/aio-libs/yarl/", - license="Apache 2", - packages=["yarl"], - install_requires=install_requires, - python_requires=">=3.6", - include_package_data=True, -) - - -if not NO_EXTENSIONS: - print("**********************") - print("* Accellerated build *") - print("**********************") - setup(ext_modules=extensions, **args) -else: - print("*********************") - print("* Pure Python build *") - print("*********************") - setup(**args) diff --git a/third_party/python/yarl/yarl/__init__.py b/third_party/python/yarl/yarl/__init__.py deleted file mode 100644 index db4e94817aa2..000000000000 --- a/third_party/python/yarl/yarl/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from ._url import URL, cache_clear, cache_configure, cache_info - -__version__ = "1.6.3" - -__all__ = ("URL", "cache_clear", "cache_configure", "cache_info") diff --git a/third_party/python/yarl/yarl/__init__.pyi b/third_party/python/yarl/yarl/__init__.pyi deleted file mode 100644 index a8d8fbda486c..000000000000 --- a/third_party/python/yarl/yarl/__init__.pyi +++ /dev/null @@ -1,111 +0,0 @@ -from typing import overload, Any, Tuple, Optional, Mapping, Union, Sequence, Type -import multidict -from functools import _CacheInfo -import sys - -if sys.version_info >= (3, 8): - from typing import TypedDict, Final, final -else: - from typing_extensions import TypedDict, Final, final - -_SimpleQuery = Union[str, int, float] -_QueryVariable = Union[_SimpleQuery, Sequence[_SimpleQuery]] -_Query = Union[ - None, str, Mapping[str, _QueryVariable], Sequence[Tuple[str, _QueryVariable]] -] -@final -class URL: - scheme: Final[str] - raw_user: Final[str] - user: Final[Optional[str]] - raw_password: Final[Optional[str]] - password: Final[Optional[str]] - raw_host: Final[Optional[str]] - host: Final[Optional[str]] - port: Final[Optional[int]] - raw_authority: Final[str] - authority: Final[str] - raw_path: Final[str] - path: Final[str] - raw_query_string: Final[str] - query_string: Final[str] - path_qs: Final[str] - raw_path_qs: Final[str] - raw_fragment: Final[str] - fragment: Final[str] - query: Final[multidict.MultiDict[str]] - raw_name: Final[str] - name: Final[str] - raw_parts: Final[Tuple[str, ...]] - parts: Final[Tuple[str, ...]] - parent: Final[URL] - def __init__( - self, val: Union[str, "URL"] = ..., *, encoded: bool = ... - ) -> None: ... 
- @classmethod - def build( - cls, - *, - scheme: str = ..., - authority: str = ..., - user: Optional[str] = ..., - password: Optional[str] = ..., - host: str = ..., - port: Optional[int] = ..., - path: str = ..., - query: Optional[_Query] = ..., - query_string: str = ..., - fragment: str = ..., - encoded: bool = ... - ) -> URL: ... - def __str__(self) -> str: ... - def __repr__(self) -> str: ... - def __eq__(self, other: Any) -> bool: ... - def __le__(self, other: Any) -> bool: ... - def __lt__(self, other: Any) -> bool: ... - def __ge__(self, other: Any) -> bool: ... - def __gt__(self, other: Any) -> bool: ... - def __hash__(self) -> int: ... - def __truediv__(self, name: str) -> URL: ... - def __mod__(self, query: _Query) -> URL: ... - def is_absolute(self) -> bool: ... - def is_default_port(self) -> bool: ... - def origin(self) -> URL: ... - def relative(self) -> URL: ... - def with_scheme(self, scheme: str) -> URL: ... - def with_user(self, user: Optional[str]) -> URL: ... - def with_password(self, password: Optional[str]) -> URL: ... - def with_host(self, host: str) -> URL: ... - def with_port(self, port: Optional[int]) -> URL: ... - def with_path(self, path: str, *, encoded: bool = ...) -> URL: ... - @overload - def with_query(self, query: _Query) -> URL: ... - @overload - def with_query(self, **kwargs: _QueryVariable) -> URL: ... - @overload - def update_query(self, query: _Query) -> URL: ... - @overload - def update_query(self, **kwargs: _QueryVariable) -> URL: ... - def with_fragment(self, fragment: Optional[str]) -> URL: ... - def with_name(self, name: str) -> URL: ... - def join(self, url: URL) -> URL: ... - def human_repr(self) -> str: ... - # private API - @classmethod - def _normalize_path(cls, path: str) -> str: ... - -@final -class cached_property: - def __init__(self, wrapped: Any) -> None: ... - def __get__(self, inst: URL, owner: Type[URL]) -> Any: ... - def __set__(self, inst: URL, value: Any) -> None: ... - -class CacheInfo(TypedDict): - idna_encode: _CacheInfo - idna_decode: _CacheInfo - -def cache_clear() -> None: ... -def cache_info() -> CacheInfo: ... -def cache_configure( - *, idna_encode_size: Optional[int] = ..., idna_decode_size: Optional[int] = ... -) -> None: ... diff --git a/third_party/python/yarl/yarl/_quoting.py b/third_party/python/yarl/yarl/_quoting.py deleted file mode 100644 index 46e100a9eeae..000000000000 --- a/third_party/python/yarl/yarl/_quoting.py +++ /dev/null @@ -1,18 +0,0 @@ -import os -import sys - -__all__ = ("_Quoter", "_Unquoter") - - -NO_EXTENSIONS = bool(os.environ.get("YARL_NO_EXTENSIONS")) # type: bool -if sys.implementation.name != "cpython": - NO_EXTENSIONS = True - - -if not NO_EXTENSIONS: # pragma: no branch - try: - from ._quoting_c import _Quoter, _Unquoter # type: ignore[misc] - except ImportError: # pragma: no cover - from ._quoting_py import _Quoter, _Unquoter # type: ignore[misc] -else: - from ._quoting_py import _Quoter, _Unquoter # type: ignore[misc] diff --git a/third_party/python/yarl/yarl/_quoting_c.c b/third_party/python/yarl/yarl/_quoting_c.c deleted file mode 100644 index cdb46f71de8f..000000000000 --- a/third_party/python/yarl/yarl/_quoting_c.c +++ /dev/null @@ -1,11612 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. 
-#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. -#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - 
#define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if 
CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - 
((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__yarl___quoting_c -#define __PYX_HAVE_API__yarl___quoting_c -/* Early includes */ -#include -#include -#include -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) 
-#else - #define __Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ 
-static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "yarl/_quoting_c.pyx", - "stringsource", - "type.pxd", -}; - -/*--- Type declarations ---*/ -struct __pyx_obj_4yarl_10_quoting_c__Quoter; -struct __pyx_obj_4yarl_10_quoting_c__Unquoter; -struct __pyx_t_4yarl_10_quoting_c_Writer; - -/* "yarl/_quoting_c.pyx":79 - * # ----------------- writer --------------------------- - * - * cdef struct Writer: # <<<<<<<<<<<<<< - * char *buf - * Py_ssize_t size - */ -struct __pyx_t_4yarl_10_quoting_c_Writer { - char *buf; - Py_ssize_t size; - Py_ssize_t pos; - int changed; -}; - -/* "yarl/_quoting_c.pyx":169 - * - * - * cdef class _Quoter: # <<<<<<<<<<<<<< - * cdef bint _qs - * cdef bint _requote - */ -struct __pyx_obj_4yarl_10_quoting_c__Quoter { - PyObject_HEAD - struct __pyx_vtabstruct_4yarl_10_quoting_c__Quoter *__pyx_vtab; - int _qs; - int _requote; - uint8_t _safe_table[16]; - uint8_t _protected_table[16]; -}; - - -/* "yarl/_quoting_c.pyx":271 - * - * - * cdef class _Unquoter: # <<<<<<<<<<<<<< - * cdef str _unsafe - * cdef bint _qs - */ -struct __pyx_obj_4yarl_10_quoting_c__Unquoter { - PyObject_HEAD - struct __pyx_vtabstruct_4yarl_10_quoting_c__Unquoter *__pyx_vtab; - PyObject *_unsafe; - int _qs; - struct __pyx_obj_4yarl_10_quoting_c__Quoter *_quoter; - struct __pyx_obj_4yarl_10_quoting_c__Quoter *_qs_quoter; -}; - - - -/* "yarl/_quoting_c.pyx":169 - * - * - * cdef class _Quoter: # <<<<<<<<<<<<<< - * cdef bint _qs - * cdef bint _requote - */ - -struct __pyx_vtabstruct_4yarl_10_quoting_c__Quoter { - PyObject *(*_do_quote)(struct __pyx_obj_4yarl_10_quoting_c__Quoter *, PyObject *, struct __pyx_t_4yarl_10_quoting_c_Writer *); - int (*_write)(struct __pyx_obj_4yarl_10_quoting_c__Quoter *, struct __pyx_t_4yarl_10_quoting_c_Writer *, Py_UCS4); -}; -static struct __pyx_vtabstruct_4yarl_10_quoting_c__Quoter *__pyx_vtabptr_4yarl_10_quoting_c__Quoter; -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c_7_Quoter__write(struct __pyx_obj_4yarl_10_quoting_c__Quoter *, struct __pyx_t_4yarl_10_quoting_c_Writer *, Py_UCS4); - - -/* "yarl/_quoting_c.pyx":271 - * - * - * cdef class _Unquoter: # <<<<<<<<<<<<<< - * cdef str _unsafe - * cdef bint _qs - */ - -struct __pyx_vtabstruct_4yarl_10_quoting_c__Unquoter { - PyObject *(*_do_unquote)(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *, PyObject *); -}; -static struct __pyx_vtabstruct_4yarl_10_quoting_c__Unquoter *__pyx_vtabptr_4yarl_10_quoting_c__Unquoter; - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD 
- #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static 
CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* WriteUnraisableException.proto */ -static void __Pyx_WriteUnraisable(const char *name, int clineno, - int lineno, const char *filename, - int full_traceback, int nogil); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* ArgTypeTest.proto */ -#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ - ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\ - __Pyx__ArgTypeTest(obj, type, name, exact)) -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); - -/* unicode_iter.proto */ -static CYTHON_INLINE int __Pyx_init_unicode_iteration( - PyObject* ustring, Py_ssize_t *length, void** data, int *kind); - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + 
Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* GetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* SwapException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); -#endif - -/* GetTopmostException.proto */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); -#endif - -/* SaveResetException.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -#else -#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) -#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) -#endif - -/* GetItemIntUnicode.proto */ -#define __Pyx_GetItemInt_Unicode(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Unicode_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "string index out of range"), (Py_UCS4)-1)) -static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py_ssize_t i, - int wraparound, int boundscheck); - -/* ReRaiseException.proto */ -static CYTHON_INLINE void __Pyx_ReraiseException(void); - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) 
-#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* PyObjectCallNoArg.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); -#else -#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) -#endif - -/* ListAppend.proto */ -#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS -static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { - PyListObject* L = (PyListObject*) list; - Py_ssize_t len = Py_SIZE(list); - if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { - Py_INCREF(x); - PyList_SET_ITEM(list, len, x); - __Pyx_SET_SIZE(list, len + 1); - return 0; - } - return PyList_Append(list, x); -} -#else -#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) -#endif - -/* PyUnicode_Substring.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( - PyObject* text, Py_ssize_t start, Py_ssize_t stop); - -/* PyUnicodeContains.proto */ -static CYTHON_INLINE int __Pyx_PyUnicode_ContainsTF(PyObject* substring, PyObject* text, int eq) { - int result = PyUnicode_Contains(text, substring); - return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); -} - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* SliceObject.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( - PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, - PyObject** py_start, PyObject** py_stop, PyObject** py_slice, - int has_cstart, int has_cstop, int wraparound); - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* IncludeStringH.proto */ -#include - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* ExtTypeTest.proto */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable); - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* SetupReduce.proto */ -static int __Pyx_setup_reduce(PyObject* type_obj); - -/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto -#define __PYX_HAVE_RT_ImportType_proto -enum __Pyx_ImportType_CheckSize { - __Pyx_ImportType_CheckSize_Error = 0, - __Pyx_ImportType_CheckSize_Warn = 1, - __Pyx_ImportType_CheckSize_Ignore = 2 -}; -static PyTypeObject *__Pyx_ImportType(PyObject* module, 
const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); -#endif - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* PyUCS4InUnicode.proto */ -static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character); - -/* UnicodeAsUCS4.proto */ -static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject*); - -/* CIntFromPy.proto */ -static CYTHON_INLINE uint8_t __Pyx_PyInt_As_uint8_t(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE uint64_t __Pyx_PyInt_As_uint64_t(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* ObjectAsUCS4.proto */ -#define __Pyx_PyObject_AsPy_UCS4(x)\ - (likely(PyUnicode_Check(x)) ? 
__Pyx_PyUnicode_AsPy_UCS4(x) : __Pyx__PyObject_AsPy_UCS4(x)) -static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject*); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -static PyObject *__pyx_f_4yarl_10_quoting_c_7_Quoter__do_quote(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v_val, struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer); /* proto*/ -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c_7_Quoter__write(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer, Py_UCS4 __pyx_v_ch); /* proto*/ -static PyObject *__pyx_f_4yarl_10_quoting_c_9_Unquoter__do_unquote(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v_val); /* proto*/ - -/* Module declarations from 'libc.stdint' */ - -/* Module declarations from 'libc.string' */ - -/* Module declarations from 'libc.stdio' */ - -/* Module declarations from '__builtin__' */ - -/* Module declarations from 'cpython.type' */ -static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; - -/* Module declarations from 'cpython' */ - -/* Module declarations from 'cpython.object' */ - -/* Module declarations from 'cpython.exc' */ - -/* Module declarations from 'cpython.mem' */ - -/* Module declarations from 'cpython.unicode' */ - -/* Module declarations from 'yarl._quoting_c' */ -static PyTypeObject *__pyx_ptype_4yarl_10_quoting_c__Quoter = 0; -static PyTypeObject *__pyx_ptype_4yarl_10_quoting_c__Unquoter = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_GEN_DELIMS = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_SUB_DELIMS_WITHOUT_QS = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_SUB_DELIMS = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_RESERVED = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_UNRESERVED = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_ALLOWED = 0; -static PyObject *__pyx_v_4yarl_10_quoting_c_QS = 0; -static char __pyx_v_4yarl_10_quoting_c_BUFFER[0x2000]; -static uint8_t __pyx_v_4yarl_10_quoting_c_ALLOWED_TABLE[16]; -static uint8_t __pyx_v_4yarl_10_quoting_c_ALLOWED_NOTQS_TABLE[16]; -static CYTHON_INLINE Py_UCS4 __pyx_f_4yarl_10_quoting_c__to_hex(uint8_t); /*proto*/ -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__from_hex(Py_UCS4); /*proto*/ -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__is_lower_hex(Py_UCS4); /*proto*/ -static CYTHON_INLINE Py_UCS4 __pyx_f_4yarl_10_quoting_c__restore_ch(Py_UCS4, Py_UCS4); /*proto*/ -static CYTHON_INLINE int 
__pyx_f_4yarl_10_quoting_c_bit_at(uint8_t *, uint64_t); /*proto*/ -static CYTHON_INLINE void __pyx_f_4yarl_10_quoting_c_set_bit(uint8_t *, uint64_t); /*proto*/ -static CYTHON_INLINE void __pyx_f_4yarl_10_quoting_c__init_writer(struct __pyx_t_4yarl_10_quoting_c_Writer *); /*proto*/ -static CYTHON_INLINE void __pyx_f_4yarl_10_quoting_c__release_writer(struct __pyx_t_4yarl_10_quoting_c_Writer *); /*proto*/ -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__write_char(struct __pyx_t_4yarl_10_quoting_c_Writer *, Py_UCS4, int); /*proto*/ -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__write_pct(struct __pyx_t_4yarl_10_quoting_c_Writer *, uint8_t, int); /*proto*/ -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__write_utf8(struct __pyx_t_4yarl_10_quoting_c_Writer *, Py_UCS4); /*proto*/ -static PyObject *__pyx_f_4yarl_10_quoting_c___pyx_unpickle__Quoter__set_state(struct __pyx_obj_4yarl_10_quoting_c__Quoter *, PyObject *); /*proto*/ -static PyObject *__pyx_f_4yarl_10_quoting_c___pyx_unpickle__Unquoter__set_state(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *, PyObject *); /*proto*/ -static int __Pyx_carray_from_py_uint8_t(PyObject *, uint8_t *, Py_ssize_t); /*proto*/ -#define __Pyx_MODULE_NAME "yarl._quoting_c" -extern int __pyx_module_is_main_yarl___quoting_c; -int __pyx_module_is_main_yarl___quoting_c = 0; - -/* Implementation of 'yarl._quoting_c' */ -static PyObject *__pyx_builtin_range; -static PyObject *__pyx_builtin_chr; -static PyObject *__pyx_builtin_ValueError; -static PyObject *__pyx_builtin_TypeError; -static PyObject *__pyx_builtin_UnicodeDecodeError; -static PyObject *__pyx_builtin_hex; -static PyObject *__pyx_builtin_OverflowError; -static PyObject *__pyx_builtin_enumerate; -static PyObject *__pyx_builtin_IndexError; -static const char __pyx_k_[] = ""; -static const char __pyx_k_i[] = "i"; -static const char __pyx_k__4[] = "+=&;"; -static const char __pyx_k__5[] = "+"; -static const char __pyx_k__6[] = " "; -static const char __pyx_k__7[] = "%"; -static const char __pyx_k__9[] = ":/?#[]@"; -static const char __pyx_k_qs[] = "qs"; -static const char __pyx_k__10[] = "!$'()*,"; -static const char __pyx_k__11[] = "+?=;"; -static const char __pyx_k__12[] = "-._~"; -static const char __pyx_k__13[] = "+&=;"; -static const char __pyx_k_chr[] = "chr"; -static const char __pyx_k_hex[] = "hex"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_val[] = "val"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_safe[] = "safe"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_range[] = "range"; -static const char __pyx_k_upper[] = "upper"; -static const char __pyx_k_Quoter[] = "_Quoter"; -static const char __pyx_k_digits[] = "digits"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_string[] = "string"; -static const char __pyx_k_unsafe[] = "unsafe"; -static const char __pyx_k_update[] = "update"; -static const char __pyx_k_requote[] = "requote"; -static const char __pyx_k_Unquoter[] = "_Unquoter"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_enumerate[] = "enumerate"; -static const char 
__pyx_k_protected[] = "protected"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_IndexError[] = "IndexError"; -static const char __pyx_k_ValueError[] = "ValueError"; -static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_stringsource[] = "stringsource"; -static const char __pyx_k_OverflowError[] = "OverflowError"; -static const char __pyx_k_ascii_letters[] = "ascii_letters"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_yarl__quoting_c[] = "yarl._quoting_c"; -static const char __pyx_k_UnicodeDecodeError[] = "UnicodeDecodeError"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_pyx_unpickle__Quoter[] = "__pyx_unpickle__Quoter"; -static const char __pyx_k_Argument_should_be_str[] = "Argument should be str"; -static const char __pyx_k_pyx_unpickle__Unquoter[] = "__pyx_unpickle__Unquoter"; -static const char __pyx_k_Incompatible_checksums_s_vs_0x27[] = "Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))"; -static const char __pyx_k_Incompatible_checksums_s_vs_0xe9[] = "Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))"; -static const char __pyx_k_Only_safe_symbols_with_ORD_128_a[] = "Only safe symbols with ORD < 128 are allowed"; -static PyObject *__pyx_kp_u_; -static PyObject *__pyx_kp_u_Argument_should_be_str; -static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x27; -static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0xe9; -static PyObject *__pyx_n_s_IndexError; -static PyObject *__pyx_kp_u_Only_safe_symbols_with_ORD_128_a; -static PyObject *__pyx_n_s_OverflowError; -static PyObject *__pyx_n_s_PickleError; -static PyObject *__pyx_n_s_Quoter; -static PyObject *__pyx_n_s_TypeError; -static PyObject *__pyx_n_s_UnicodeDecodeError; -static PyObject *__pyx_n_s_Unquoter; -static PyObject *__pyx_n_s_ValueError; -static PyObject *__pyx_kp_u__10; -static PyObject *__pyx_kp_u__11; -static PyObject *__pyx_kp_u__12; -static PyObject *__pyx_kp_u__13; -static PyObject *__pyx_kp_u__4; -static PyObject *__pyx_kp_u__5; -static PyObject *__pyx_kp_u__6; -static PyObject *__pyx_kp_u__7; -static PyObject *__pyx_kp_u__9; -static PyObject *__pyx_n_s_ascii_letters; -static PyObject *__pyx_n_s_chr; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_dict; -static PyObject *__pyx_n_s_digits; -static PyObject *__pyx_n_s_enumerate; -static PyObject *__pyx_n_s_getstate; -static PyObject *__pyx_n_s_hex; -static PyObject *__pyx_n_s_i; -static PyObject *__pyx_n_s_import; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_new; -static PyObject *__pyx_n_s_pickle; -static PyObject *__pyx_n_s_protected; -static PyObject *__pyx_n_s_pyx_PickleError; -static PyObject *__pyx_n_s_pyx_checksum; -static PyObject *__pyx_n_s_pyx_result; -static PyObject *__pyx_n_s_pyx_state; -static PyObject *__pyx_n_s_pyx_type; -static PyObject *__pyx_n_s_pyx_unpickle__Quoter; -static PyObject *__pyx_n_s_pyx_unpickle__Unquoter; -static PyObject *__pyx_n_s_pyx_vtable; -static 
PyObject *__pyx_n_s_qs; -static PyObject *__pyx_n_s_range; -static PyObject *__pyx_n_s_reduce; -static PyObject *__pyx_n_s_reduce_cython; -static PyObject *__pyx_n_s_reduce_ex; -static PyObject *__pyx_n_s_requote; -static PyObject *__pyx_n_s_safe; -static PyObject *__pyx_n_s_setstate; -static PyObject *__pyx_n_s_setstate_cython; -static PyObject *__pyx_n_s_string; -static PyObject *__pyx_kp_s_stringsource; -static PyObject *__pyx_n_s_test; -static PyObject *__pyx_n_s_unsafe; -static PyObject *__pyx_n_s_update; -static PyObject *__pyx_n_s_upper; -static PyObject *__pyx_n_s_val; -static PyObject *__pyx_n_s_yarl__quoting_c; -static int __pyx_pf_4yarl_10_quoting_c_7_Quoter___init__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v_safe, PyObject *__pyx_v_protected, int __pyx_v_qs, int __pyx_v_requote); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_7_Quoter_2__call__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v_val); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_7_Quoter_4__reduce_cython__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_7_Quoter_6__setstate_cython__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static int __pyx_pf_4yarl_10_quoting_c_9_Unquoter___init__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v_unsafe, PyObject *__pyx_v_qs); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_9_Unquoter_2__call__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v_val); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_9_Unquoter_4__reduce_cython__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_9_Unquoter_6__setstate_cython__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c___pyx_unpickle__Quoter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_4yarl_10_quoting_c_2__pyx_unpickle__Unquoter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_4yarl_10_quoting_c__Quoter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_tp_new_4yarl_10_quoting_c__Unquoter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_int_2; -static PyObject *__pyx_int_41310077; -static PyObject *__pyx_int_244432181; -static PyObject *__pyx_slice__8; -static PyObject *__pyx_tuple__2; -static PyObject *__pyx_tuple__3; -static PyObject *__pyx_tuple__14; -static PyObject *__pyx_tuple__16; -static PyObject *__pyx_codeobj__15; -static PyObject *__pyx_codeobj__17; -/* Late includes */ - -/* "yarl/_quoting_c.pyx":23 - * cdef char BUFFER[BUF_SIZE] - * - * cdef inline Py_UCS4 _to_hex(uint8_t v): # <<<<<<<<<<<<<< - * if v < 10: - * return (v+0x30) # ord('0') == 0x30 - */ - -static CYTHON_INLINE Py_UCS4 __pyx_f_4yarl_10_quoting_c__to_hex(uint8_t __pyx_v_v) { - Py_UCS4 __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - __Pyx_RefNannySetupContext("_to_hex", 0); - - /* "yarl/_quoting_c.pyx":24 - * - * cdef inline Py_UCS4 _to_hex(uint8_t v): - * if v < 10: # <<<<<<<<<<<<<< - * return (v+0x30) # 
ord('0') == 0x30 - * else: - */ - __pyx_t_1 = ((__pyx_v_v < 10) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":25 - * cdef inline Py_UCS4 _to_hex(uint8_t v): - * if v < 10: - * return (v+0x30) # ord('0') == 0x30 # <<<<<<<<<<<<<< - * else: - * return (v+0x41-10) # ord('A') == 0x41 - */ - __pyx_r = ((Py_UCS4)(__pyx_v_v + 0x30)); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":24 - * - * cdef inline Py_UCS4 _to_hex(uint8_t v): - * if v < 10: # <<<<<<<<<<<<<< - * return (v+0x30) # ord('0') == 0x30 - * else: - */ - } - - /* "yarl/_quoting_c.pyx":27 - * return (v+0x30) # ord('0') == 0x30 - * else: - * return (v+0x41-10) # ord('A') == 0x41 # <<<<<<<<<<<<<< - * - * - */ - /*else*/ { - __pyx_r = ((Py_UCS4)((__pyx_v_v + 0x41) - 10)); - goto __pyx_L0; - } - - /* "yarl/_quoting_c.pyx":23 - * cdef char BUFFER[BUF_SIZE] - * - * cdef inline Py_UCS4 _to_hex(uint8_t v): # <<<<<<<<<<<<<< - * if v < 10: - * return (v+0x30) # ord('0') == 0x30 - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":30 - * - * - * cdef inline int _from_hex(Py_UCS4 v): # <<<<<<<<<<<<<< - * if '0' <= v <= '9': - * return (v) - 0x30 # ord('0') == 0x30 - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__from_hex(Py_UCS4 __pyx_v_v) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - __Pyx_RefNannySetupContext("_from_hex", 0); - - /* "yarl/_quoting_c.pyx":31 - * - * cdef inline int _from_hex(Py_UCS4 v): - * if '0' <= v <= '9': # <<<<<<<<<<<<<< - * return (v) - 0x30 # ord('0') == 0x30 - * elif 'A' <= v <= 'F': - */ - __pyx_t_1 = (48 <= __pyx_v_v); - if (__pyx_t_1) { - __pyx_t_1 = (__pyx_v_v <= 57); - } - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":32 - * cdef inline int _from_hex(Py_UCS4 v): - * if '0' <= v <= '9': - * return (v) - 0x30 # ord('0') == 0x30 # <<<<<<<<<<<<<< - * elif 'A' <= v <= 'F': - * return (v) - 0x41 + 10 # ord('A') == 0x41 - */ - __pyx_r = (((int)__pyx_v_v) - 0x30); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":31 - * - * cdef inline int _from_hex(Py_UCS4 v): - * if '0' <= v <= '9': # <<<<<<<<<<<<<< - * return (v) - 0x30 # ord('0') == 0x30 - * elif 'A' <= v <= 'F': - */ - } - - /* "yarl/_quoting_c.pyx":33 - * if '0' <= v <= '9': - * return (v) - 0x30 # ord('0') == 0x30 - * elif 'A' <= v <= 'F': # <<<<<<<<<<<<<< - * return (v) - 0x41 + 10 # ord('A') == 0x41 - * elif 'a' <= v <= 'f': - */ - __pyx_t_2 = (65 <= __pyx_v_v); - if (__pyx_t_2) { - __pyx_t_2 = (__pyx_v_v <= 70); - } - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":34 - * return (v) - 0x30 # ord('0') == 0x30 - * elif 'A' <= v <= 'F': - * return (v) - 0x41 + 10 # ord('A') == 0x41 # <<<<<<<<<<<<<< - * elif 'a' <= v <= 'f': - * return (v) - 0x61 + 10 # ord('a') == 0x61 - */ - __pyx_r = ((((int)__pyx_v_v) - 0x41) + 10); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":33 - * if '0' <= v <= '9': - * return (v) - 0x30 # ord('0') == 0x30 - * elif 'A' <= v <= 'F': # <<<<<<<<<<<<<< - * return (v) - 0x41 + 10 # ord('A') == 0x41 - * elif 'a' <= v <= 'f': - */ - } - - /* "yarl/_quoting_c.pyx":35 - * elif 'A' <= v <= 'F': - * return (v) - 0x41 + 10 # ord('A') == 0x41 - * elif 'a' <= v <= 'f': # <<<<<<<<<<<<<< - * return (v) - 0x61 + 10 # ord('a') == 0x61 - * else: - */ - __pyx_t_1 = (97 <= __pyx_v_v); - if (__pyx_t_1) { - __pyx_t_1 = (__pyx_v_v <= 0x66); - } - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":36 - * return (v) - 0x41 + 10 # ord('A') == 0x41 - * elif 
'a' <= v <= 'f': - * return (v) - 0x61 + 10 # ord('a') == 0x61 # <<<<<<<<<<<<<< - * else: - * return -1 - */ - __pyx_r = ((((int)__pyx_v_v) - 0x61) + 10); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":35 - * elif 'A' <= v <= 'F': - * return (v) - 0x41 + 10 # ord('A') == 0x41 - * elif 'a' <= v <= 'f': # <<<<<<<<<<<<<< - * return (v) - 0x61 + 10 # ord('a') == 0x61 - * else: - */ - } - - /* "yarl/_quoting_c.pyx":38 - * return (v) - 0x61 + 10 # ord('a') == 0x61 - * else: - * return -1 # <<<<<<<<<<<<<< - * - * - */ - /*else*/ { - __pyx_r = -1; - goto __pyx_L0; - } - - /* "yarl/_quoting_c.pyx":30 - * - * - * cdef inline int _from_hex(Py_UCS4 v): # <<<<<<<<<<<<<< - * if '0' <= v <= '9': - * return (v) - 0x30 # ord('0') == 0x30 - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":41 - * - * - * cdef inline int _is_lower_hex(Py_UCS4 v): # <<<<<<<<<<<<<< - * return 'a' <= v <= 'f' - * - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__is_lower_hex(Py_UCS4 __pyx_v_v) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - __Pyx_RefNannySetupContext("_is_lower_hex", 0); - - /* "yarl/_quoting_c.pyx":42 - * - * cdef inline int _is_lower_hex(Py_UCS4 v): - * return 'a' <= v <= 'f' # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_1 = (97 <= __pyx_v_v); - if (__pyx_t_1) { - __pyx_t_1 = (__pyx_v_v <= 0x66); - } - __pyx_r = __pyx_t_1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":41 - * - * - * cdef inline int _is_lower_hex(Py_UCS4 v): # <<<<<<<<<<<<<< - * return 'a' <= v <= 'f' - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":45 - * - * - * cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): # <<<<<<<<<<<<<< - * cdef int digit1 = _from_hex(d1) - * if digit1 < 0: - */ - -static CYTHON_INLINE Py_UCS4 __pyx_f_4yarl_10_quoting_c__restore_ch(Py_UCS4 __pyx_v_d1, Py_UCS4 __pyx_v_d2) { - int __pyx_v_digit1; - int __pyx_v_digit2; - Py_UCS4 __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - __Pyx_RefNannySetupContext("_restore_ch", 0); - - /* "yarl/_quoting_c.pyx":46 - * - * cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): - * cdef int digit1 = _from_hex(d1) # <<<<<<<<<<<<<< - * if digit1 < 0: - * return -1 - */ - __pyx_v_digit1 = __pyx_f_4yarl_10_quoting_c__from_hex(__pyx_v_d1); - - /* "yarl/_quoting_c.pyx":47 - * cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): - * cdef int digit1 = _from_hex(d1) - * if digit1 < 0: # <<<<<<<<<<<<<< - * return -1 - * cdef int digit2 = _from_hex(d2) - */ - __pyx_t_1 = ((__pyx_v_digit1 < 0) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":48 - * cdef int digit1 = _from_hex(d1) - * if digit1 < 0: - * return -1 # <<<<<<<<<<<<<< - * cdef int digit2 = _from_hex(d2) - * if digit2 < 0: - */ - __pyx_r = ((Py_UCS4)-1L); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":47 - * cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): - * cdef int digit1 = _from_hex(d1) - * if digit1 < 0: # <<<<<<<<<<<<<< - * return -1 - * cdef int digit2 = _from_hex(d2) - */ - } - - /* "yarl/_quoting_c.pyx":49 - * if digit1 < 0: - * return -1 - * cdef int digit2 = _from_hex(d2) # <<<<<<<<<<<<<< - * if digit2 < 0: - * return -1 - */ - __pyx_v_digit2 = __pyx_f_4yarl_10_quoting_c__from_hex(__pyx_v_d2); - - /* "yarl/_quoting_c.pyx":50 - * return -1 - * cdef int digit2 = _from_hex(d2) - * if digit2 < 0: # <<<<<<<<<<<<<< - * return -1 - * return (digit1 << 4 | digit2) - */ - __pyx_t_1 = ((__pyx_v_digit2 < 0) != 0); - if 
(__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":51 - * cdef int digit2 = _from_hex(d2) - * if digit2 < 0: - * return -1 # <<<<<<<<<<<<<< - * return (digit1 << 4 | digit2) - * - */ - __pyx_r = ((Py_UCS4)-1L); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":50 - * return -1 - * cdef int digit2 = _from_hex(d2) - * if digit2 < 0: # <<<<<<<<<<<<<< - * return -1 - * return (digit1 << 4 | digit2) - */ - } - - /* "yarl/_quoting_c.pyx":52 - * if digit2 < 0: - * return -1 - * return (digit1 << 4 | digit2) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = ((Py_UCS4)((__pyx_v_digit1 << 4) | __pyx_v_digit2)); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":45 - * - * - * cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): # <<<<<<<<<<<<<< - * cdef int digit1 = _from_hex(d1) - * if digit1 < 0: - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":59 - * - * - * cdef inline bint bit_at(uint8_t array[], uint64_t ch): # <<<<<<<<<<<<<< - * return array[ch >> 3] & (1 << (ch & 7)) - * - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c_bit_at(uint8_t *__pyx_v_array, uint64_t __pyx_v_ch) { - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("bit_at", 0); - - /* "yarl/_quoting_c.pyx":60 - * - * cdef inline bint bit_at(uint8_t array[], uint64_t ch): - * return array[ch >> 3] & (1 << (ch & 7)) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = ((__pyx_v_array[(__pyx_v_ch >> 3)]) & (1 << (__pyx_v_ch & 7))); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":59 - * - * - * cdef inline bint bit_at(uint8_t array[], uint64_t ch): # <<<<<<<<<<<<<< - * return array[ch >> 3] & (1 << (ch & 7)) - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":63 - * - * - * cdef inline void set_bit(uint8_t array[], uint64_t ch): # <<<<<<<<<<<<<< - * array[ch >> 3] |= (1 << (ch & 7)) - * - */ - -static CYTHON_INLINE void __pyx_f_4yarl_10_quoting_c_set_bit(uint8_t *__pyx_v_array, uint64_t __pyx_v_ch) { - __Pyx_RefNannyDeclarations - uint64_t __pyx_t_1; - __Pyx_RefNannySetupContext("set_bit", 0); - - /* "yarl/_quoting_c.pyx":64 - * - * cdef inline void set_bit(uint8_t array[], uint64_t ch): - * array[ch >> 3] |= (1 << (ch & 7)) # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_1 = (__pyx_v_ch >> 3); - (__pyx_v_array[__pyx_t_1]) = ((__pyx_v_array[__pyx_t_1]) | (1 << (__pyx_v_ch & 7))); - - /* "yarl/_quoting_c.pyx":63 - * - * - * cdef inline void set_bit(uint8_t array[], uint64_t ch): # <<<<<<<<<<<<<< - * array[ch >> 3] |= (1 << (ch & 7)) - * - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "yarl/_quoting_c.pyx":86 - * - * - * cdef inline void _init_writer(Writer* writer): # <<<<<<<<<<<<<< - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE - */ - -static CYTHON_INLINE void __pyx_f_4yarl_10_quoting_c__init_writer(struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_init_writer", 0); - - /* "yarl/_quoting_c.pyx":87 - * - * cdef inline void _init_writer(Writer* writer): - * writer.buf = &BUFFER[0] # <<<<<<<<<<<<<< - * writer.size = BUF_SIZE - * writer.pos = 0 - */ - __pyx_v_writer->buf = (&(__pyx_v_4yarl_10_quoting_c_BUFFER[0])); - - /* "yarl/_quoting_c.pyx":88 - * cdef inline void _init_writer(Writer* writer): - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE # <<<<<<<<<<<<<< - * writer.pos = 0 - * writer.changed = 0 - */ - __pyx_v_writer->size = 0x2000; - - /* "yarl/_quoting_c.pyx":89 - * 
writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE - * writer.pos = 0 # <<<<<<<<<<<<<< - * writer.changed = 0 - * - */ - __pyx_v_writer->pos = 0; - - /* "yarl/_quoting_c.pyx":90 - * writer.size = BUF_SIZE - * writer.pos = 0 - * writer.changed = 0 # <<<<<<<<<<<<<< - * - * - */ - __pyx_v_writer->changed = 0; - - /* "yarl/_quoting_c.pyx":86 - * - * - * cdef inline void _init_writer(Writer* writer): # <<<<<<<<<<<<<< - * writer.buf = &BUFFER[0] - * writer.size = BUF_SIZE - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "yarl/_quoting_c.pyx":93 - * - * - * cdef inline void _release_writer(Writer* writer): # <<<<<<<<<<<<<< - * if writer.buf != BUFFER: - * PyMem_Free(writer.buf) - */ - -static CYTHON_INLINE void __pyx_f_4yarl_10_quoting_c__release_writer(struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer) { - __Pyx_RefNannyDeclarations - int __pyx_t_1; - __Pyx_RefNannySetupContext("_release_writer", 0); - - /* "yarl/_quoting_c.pyx":94 - * - * cdef inline void _release_writer(Writer* writer): - * if writer.buf != BUFFER: # <<<<<<<<<<<<<< - * PyMem_Free(writer.buf) - * - */ - __pyx_t_1 = ((__pyx_v_writer->buf != __pyx_v_4yarl_10_quoting_c_BUFFER) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":95 - * cdef inline void _release_writer(Writer* writer): - * if writer.buf != BUFFER: - * PyMem_Free(writer.buf) # <<<<<<<<<<<<<< - * - * - */ - PyMem_Free(__pyx_v_writer->buf); - - /* "yarl/_quoting_c.pyx":94 - * - * cdef inline void _release_writer(Writer* writer): - * if writer.buf != BUFFER: # <<<<<<<<<<<<<< - * PyMem_Free(writer.buf) - * - */ - } - - /* "yarl/_quoting_c.pyx":93 - * - * - * cdef inline void _release_writer(Writer* writer): # <<<<<<<<<<<<<< - * if writer.buf != BUFFER: - * PyMem_Free(writer.buf) - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); -} - -/* "yarl/_quoting_c.pyx":98 - * - * - * cdef inline int _write_char(Writer* writer, Py_UCS4 ch, bint changed): # <<<<<<<<<<<<<< - * cdef char * buf - * cdef Py_ssize_t size - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__write_char(struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer, Py_UCS4 __pyx_v_ch, int __pyx_v_changed) { - char *__pyx_v_buf; - Py_ssize_t __pyx_v_size; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_write_char", 0); - - /* "yarl/_quoting_c.pyx":102 - * cdef Py_ssize_t size - * - * if writer.pos == writer.size: # <<<<<<<<<<<<<< - * # reallocate - * size = writer.size + BUF_SIZE - */ - __pyx_t_1 = ((__pyx_v_writer->pos == __pyx_v_writer->size) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":104 - * if writer.pos == writer.size: - * # reallocate - * size = writer.size + BUF_SIZE # <<<<<<<<<<<<<< - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) - */ - __pyx_v_size = (__pyx_v_writer->size + 0x2000); - - /* "yarl/_quoting_c.pyx":105 - * # reallocate - * size = writer.size + BUF_SIZE - * if writer.buf == BUFFER: # <<<<<<<<<<<<<< - * buf = PyMem_Malloc(size) - * if buf == NULL: - */ - __pyx_t_1 = ((__pyx_v_writer->buf == __pyx_v_4yarl_10_quoting_c_BUFFER) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":106 - * size = writer.size + BUF_SIZE - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) # <<<<<<<<<<<<<< - * if buf == NULL: - * PyErr_NoMemory() - */ - __pyx_v_buf = ((char *)PyMem_Malloc(__pyx_v_size)); - - /* "yarl/_quoting_c.pyx":107 - * if writer.buf == BUFFER: - * buf = 
PyMem_Malloc(size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - __pyx_t_1 = ((__pyx_v_buf == NULL) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":108 - * buf = PyMem_Malloc(size) - * if buf == NULL: - * PyErr_NoMemory() # <<<<<<<<<<<<<< - * return -1 - * memcpy(buf, writer.buf, writer.size) - */ - __pyx_t_2 = PyErr_NoMemory(); if (unlikely(__pyx_t_2 == ((PyObject *)NULL))) __PYX_ERR(0, 108, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":109 - * if buf == NULL: - * PyErr_NoMemory() - * return -1 # <<<<<<<<<<<<<< - * memcpy(buf, writer.buf, writer.size) - * else: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":107 - * if writer.buf == BUFFER: - * buf = PyMem_Malloc(size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - } - - /* "yarl/_quoting_c.pyx":110 - * PyErr_NoMemory() - * return -1 - * memcpy(buf, writer.buf, writer.size) # <<<<<<<<<<<<<< - * else: - * buf = PyMem_Realloc(writer.buf, size) - */ - (void)(memcpy(__pyx_v_buf, __pyx_v_writer->buf, __pyx_v_writer->size)); - - /* "yarl/_quoting_c.pyx":105 - * # reallocate - * size = writer.size + BUF_SIZE - * if writer.buf == BUFFER: # <<<<<<<<<<<<<< - * buf = PyMem_Malloc(size) - * if buf == NULL: - */ - goto __pyx_L4; - } - - /* "yarl/_quoting_c.pyx":112 - * memcpy(buf, writer.buf, writer.size) - * else: - * buf = PyMem_Realloc(writer.buf, size) # <<<<<<<<<<<<<< - * if buf == NULL: - * PyErr_NoMemory() - */ - /*else*/ { - __pyx_v_buf = ((char *)PyMem_Realloc(__pyx_v_writer->buf, __pyx_v_size)); - - /* "yarl/_quoting_c.pyx":113 - * else: - * buf = PyMem_Realloc(writer.buf, size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - __pyx_t_1 = ((__pyx_v_buf == NULL) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":114 - * buf = PyMem_Realloc(writer.buf, size) - * if buf == NULL: - * PyErr_NoMemory() # <<<<<<<<<<<<<< - * return -1 - * writer.buf = buf - */ - __pyx_t_2 = PyErr_NoMemory(); if (unlikely(__pyx_t_2 == ((PyObject *)NULL))) __PYX_ERR(0, 114, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":115 - * if buf == NULL: - * PyErr_NoMemory() - * return -1 # <<<<<<<<<<<<<< - * writer.buf = buf - * writer.size = size - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":113 - * else: - * buf = PyMem_Realloc(writer.buf, size) - * if buf == NULL: # <<<<<<<<<<<<<< - * PyErr_NoMemory() - * return -1 - */ - } - } - __pyx_L4:; - - /* "yarl/_quoting_c.pyx":116 - * PyErr_NoMemory() - * return -1 - * writer.buf = buf # <<<<<<<<<<<<<< - * writer.size = size - * writer.buf[writer.pos] = ch - */ - __pyx_v_writer->buf = __pyx_v_buf; - - /* "yarl/_quoting_c.pyx":117 - * return -1 - * writer.buf = buf - * writer.size = size # <<<<<<<<<<<<<< - * writer.buf[writer.pos] = ch - * writer.pos += 1 - */ - __pyx_v_writer->size = __pyx_v_size; - - /* "yarl/_quoting_c.pyx":102 - * cdef Py_ssize_t size - * - * if writer.pos == writer.size: # <<<<<<<<<<<<<< - * # reallocate - * size = writer.size + BUF_SIZE - */ - } - - /* "yarl/_quoting_c.pyx":118 - * writer.buf = buf - * writer.size = size - * writer.buf[writer.pos] = ch # <<<<<<<<<<<<<< - * writer.pos += 1 - * writer.changed |= changed - */ - (__pyx_v_writer->buf[__pyx_v_writer->pos]) = ((char)__pyx_v_ch); - - /* "yarl/_quoting_c.pyx":119 - * writer.size = size - * writer.buf[writer.pos] = ch - * writer.pos += 1 # <<<<<<<<<<<<<< - * writer.changed |= changed - * return 0 - */ - __pyx_v_writer->pos = (__pyx_v_writer->pos + 1); - - /* "yarl/_quoting_c.pyx":120 - * writer.buf[writer.pos] = 
ch - * writer.pos += 1 - * writer.changed |= changed # <<<<<<<<<<<<<< - * return 0 - * - */ - __pyx_v_writer->changed = (__pyx_v_writer->changed | __pyx_v_changed); - - /* "yarl/_quoting_c.pyx":121 - * writer.pos += 1 - * writer.changed |= changed - * return 0 # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":98 - * - * - * cdef inline int _write_char(Writer* writer, Py_UCS4 ch, bint changed): # <<<<<<<<<<<<<< - * cdef char * buf - * cdef Py_ssize_t size - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_WriteUnraisable("yarl._quoting_c._write_char", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); - __pyx_r = 0; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":124 - * - * - * cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): # <<<<<<<<<<<<<< - * if _write_char(writer, '%', changed) < 0: - * return -1 - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__write_pct(struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer, uint8_t __pyx_v_ch, int __pyx_v_changed) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - __Pyx_RefNannySetupContext("_write_pct", 0); - - /* "yarl/_quoting_c.pyx":125 - * - * cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): - * if _write_char(writer, '%', changed) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: - */ - __pyx_t_1 = ((__pyx_f_4yarl_10_quoting_c__write_char(__pyx_v_writer, 37, __pyx_v_changed) < 0) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":126 - * cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): - * if _write_char(writer, '%', changed) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: - * return -1 - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":125 - * - * cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): - * if _write_char(writer, '%', changed) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: - */ - } - - /* "yarl/_quoting_c.pyx":127 - * if _write_char(writer, '%', changed) < 0: - * return -1 - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_char(writer, _to_hex(ch & 0x0f), changed) - */ - __pyx_t_1 = ((__pyx_f_4yarl_10_quoting_c__write_char(__pyx_v_writer, __pyx_f_4yarl_10_quoting_c__to_hex((((uint8_t)__pyx_v_ch) >> 4)), __pyx_v_changed) < 0) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":128 - * return -1 - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_char(writer, _to_hex(ch & 0x0f), changed) - * - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":127 - * if _write_char(writer, '%', changed) < 0: - * return -1 - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_char(writer, _to_hex(ch & 0x0f), changed) - */ - } - - /* "yarl/_quoting_c.pyx":129 - * if _write_char(writer, _to_hex(ch >> 4), changed) < 0: - * return -1 - * return _write_char(writer, _to_hex(ch & 0x0f), changed) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_char(__pyx_v_writer, __pyx_f_4yarl_10_quoting_c__to_hex((((uint8_t)__pyx_v_ch) & 0x0f)), __pyx_v_changed); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":124 - * - * - * cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): # 
<<<<<<<<<<<<<< - * if _write_char(writer, '%', changed) < 0: - * return -1 - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":132 - * - * - * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): # <<<<<<<<<<<<<< - * cdef uint64_t utf = symbol - * - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c__write_utf8(struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer, Py_UCS4 __pyx_v_symbol) { - uint64_t __pyx_v_utf; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - __Pyx_RefNannySetupContext("_write_utf8", 0); - - /* "yarl/_quoting_c.pyx":133 - * - * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): - * cdef uint64_t utf = symbol # <<<<<<<<<<<<<< - * - * if utf < 0x80: - */ - __pyx_v_utf = ((uint64_t)__pyx_v_symbol); - - /* "yarl/_quoting_c.pyx":135 - * cdef uint64_t utf = symbol - * - * if utf < 0x80: # <<<<<<<<<<<<<< - * return _write_pct(writer, utf, True) - * elif utf < 0x800: - */ - __pyx_t_1 = ((__pyx_v_utf < 0x80) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":136 - * - * if utf < 0x80: - * return _write_pct(writer, utf, True) # <<<<<<<<<<<<<< - * elif utf < 0x800: - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)__pyx_v_utf), 1); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":135 - * cdef uint64_t utf = symbol - * - * if utf < 0x80: # <<<<<<<<<<<<<< - * return _write_pct(writer, utf, True) - * elif utf < 0x800: - */ - } - - /* "yarl/_quoting_c.pyx":137 - * if utf < 0x80: - * return _write_pct(writer, utf, True) - * elif utf < 0x800: # <<<<<<<<<<<<<< - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: - * return -1 - */ - __pyx_t_1 = ((__pyx_v_utf < 0x800) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":138 - * return _write_pct(writer, utf, True) - * elif utf < 0x800: - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - */ - __pyx_t_1 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0xc0 | (__pyx_v_utf >> 6))), 1) < 0) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":139 - * elif utf < 0x800: - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * elif 0xD800 <= utf <= 0xDFFF: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":138 - * return _write_pct(writer, utf, True) - * elif utf < 0x800: - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - */ - } - - /* "yarl/_quoting_c.pyx":140 - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) # <<<<<<<<<<<<<< - * elif 0xD800 <= utf <= 0xDFFF: - * # surogate pair, ignored - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f))), 1); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":137 - * if utf < 0x80: - * return _write_pct(writer, utf, True) - * elif utf < 0x800: # <<<<<<<<<<<<<< - * if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: - * return -1 - */ - } - - /* "yarl/_quoting_c.pyx":141 - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * elif 0xD800 <= utf <= 0xDFFF: # <<<<<<<<<<<<<< - * # surogate pair, ignored - * 
return 0 - */ - __pyx_t_1 = (0xD800 <= __pyx_v_utf); - if (__pyx_t_1) { - __pyx_t_1 = (__pyx_v_utf <= 0xDFFF); - } - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":143 - * elif 0xD800 <= utf <= 0xDFFF: - * # surogate pair, ignored - * return 0 # <<<<<<<<<<<<<< - * elif utf < 0x10000: - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":141 - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * elif 0xD800 <= utf <= 0xDFFF: # <<<<<<<<<<<<<< - * # surogate pair, ignored - * return 0 - */ - } - - /* "yarl/_quoting_c.pyx":144 - * # surogate pair, ignored - * return 0 - * elif utf < 0x10000: # <<<<<<<<<<<<<< - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - * return -1 - */ - __pyx_t_2 = ((__pyx_v_utf < 0x10000) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":145 - * return 0 - * elif utf < 0x10000: - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0xe0 | (__pyx_v_utf >> 12))), 1) < 0) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":146 - * elif utf < 0x10000: - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - * True) < 0: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":145 - * return 0 - * elif utf < 0x10000: - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - */ - } - - /* "yarl/_quoting_c.pyx":148 - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - * True) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 6) & 0x3f))), 1) < 0) != 0); - - /* "yarl/_quoting_c.pyx":147 - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), # <<<<<<<<<<<<<< - * True) < 0: - * return -1 - */ - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":149 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - * True) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * elif utf > 0x10FFFF: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":147 - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), # <<<<<<<<<<<<<< - * True) < 0: - * return -1 - */ - } - - /* "yarl/_quoting_c.pyx":150 - * True) < 0: - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) # <<<<<<<<<<<<<< - * elif utf > 0x10FFFF: - * # symbol is too large - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f))), 1); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":144 - * # surogate pair, ignored - * return 0 - * elif utf < 0x10000: # <<<<<<<<<<<<<< - * if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - * return -1 - */ - } - - /* "yarl/_quoting_c.pyx":151 - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * elif utf > 0x10FFFF: # <<<<<<<<<<<<<< - * # symbol is too large - * return 0 - */ - __pyx_t_2 = ((__pyx_v_utf > 0x10FFFF) != 0); - if 
(__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":153 - * elif utf > 0x10FFFF: - * # symbol is too large - * return 0 # <<<<<<<<<<<<<< - * else: - * if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":151 - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * elif utf > 0x10FFFF: # <<<<<<<<<<<<<< - * # symbol is too large - * return 0 - */ - } - - /* "yarl/_quoting_c.pyx":155 - * return 0 - * else: - * if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), - */ - /*else*/ { - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0xf0 | (__pyx_v_utf >> 18))), 1) < 0) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":156 - * else: - * if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), - * True) < 0: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":155 - * return 0 - * else: - * if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), - */ - } - - /* "yarl/_quoting_c.pyx":158 - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), - * True) < 0: # <<<<<<<<<<<<<< - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 12) & 0x3f))), 1) < 0) != 0); - - /* "yarl/_quoting_c.pyx":157 - * if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), # <<<<<<<<<<<<<< - * True) < 0: - * return -1 - */ - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":159 - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), - * True) < 0: - * return -1 # <<<<<<<<<<<<<< - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - * True) < 0: - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":157 - * if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), # <<<<<<<<<<<<<< - * True) < 0: - * return -1 - */ - } - - /* "yarl/_quoting_c.pyx":161 - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - * True) < 0: # <<<<<<<<<<<<<< - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0x80 | ((__pyx_v_utf >> 6) & 0x3f))), 1) < 0) != 0); - - /* "yarl/_quoting_c.pyx":160 - * True) < 0: - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), # <<<<<<<<<<<<<< - * True) < 0: - * return -1 - */ - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":162 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - * True) < 0: - * return -1 # <<<<<<<<<<<<<< - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - * - */ - __pyx_r = -1; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":160 - * True) < 0: - * return -1 - * if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), # <<<<<<<<<<<<<< - * True) < 0: - * return -1 - */ - } - - /* "yarl/_quoting_c.pyx":163 - * True) < 0: - * return -1 - * return _write_pct(writer, (0x80 | (utf & 0x3f)), True) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, ((uint8_t)(0x80 | (__pyx_v_utf & 0x3f))), 1); - goto __pyx_L0; - } - - /* "yarl/_quoting_c.pyx":132 
- * - * - * cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): # <<<<<<<<<<<<<< - * cdef uint64_t utf = symbol - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":176 - * cdef uint8_t _protected_table[16] - * - * def __init__( # <<<<<<<<<<<<<< - * self, *, str safe='', str protected='', bint qs=False, bint requote=True, - * ): - */ - -/* Python wrapper */ -static int __pyx_pw_4yarl_10_quoting_c_7_Quoter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_4yarl_10_quoting_c_7_Quoter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_safe = 0; - PyObject *__pyx_v_protected = 0; - int __pyx_v_qs; - int __pyx_v_requote; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_safe,&__pyx_n_s_protected,&__pyx_n_s_qs,&__pyx_n_s_requote,0}; - PyObject* values[4] = {0,0,0,0}; - values[0] = ((PyObject*)__pyx_kp_u_); - values[1] = ((PyObject*)__pyx_kp_u_); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - if (kw_args > 0 && likely(kw_args <= 4)) { - Py_ssize_t index; - for (index = 0; index < 4 && kw_args > 0; index++) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, *__pyx_pyargnames[index]); - if (value) { values[index] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, 0, "__init__") < 0)) __PYX_ERR(0, 176, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 0) { - goto __pyx_L5_argtuple_error; - } else { - } - __pyx_v_safe = ((PyObject*)values[0]); - __pyx_v_protected = ((PyObject*)values[1]); - if (values[2]) { - __pyx_v_qs = __Pyx_PyObject_IsTrue(values[2]); if (unlikely((__pyx_v_qs == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 177, __pyx_L3_error) - } else { - - /* "yarl/_quoting_c.pyx":177 - * - * def __init__( - * self, *, str safe='', str protected='', bint qs=False, bint requote=True, # <<<<<<<<<<<<<< - * ): - * cdef Py_UCS4 ch - */ - __pyx_v_qs = ((int)0); - } - if (values[3]) { - __pyx_v_requote = __Pyx_PyObject_IsTrue(values[3]); if (unlikely((__pyx_v_requote == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 177, __pyx_L3_error) - } else { - __pyx_v_requote = ((int)1); - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 176, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("yarl._quoting_c._Quoter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_safe), (&PyUnicode_Type), 1, "safe", 1))) __PYX_ERR(0, 177, __pyx_L1_error) - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_protected), (&PyUnicode_Type), 1, "protected", 1))) __PYX_ERR(0, 177, __pyx_L1_error) - __pyx_r = __pyx_pf_4yarl_10_quoting_c_7_Quoter___init__(((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_v_self), __pyx_v_safe, __pyx_v_protected, __pyx_v_qs, __pyx_v_requote); - - /* 
"yarl/_quoting_c.pyx":176 - * cdef uint8_t _protected_table[16] - * - * def __init__( # <<<<<<<<<<<<<< - * self, *, str safe='', str protected='', bint qs=False, bint requote=True, - * ): - */ - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_4yarl_10_quoting_c_7_Quoter___init__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v_safe, PyObject *__pyx_v_protected, int __pyx_v_qs, int __pyx_v_requote) { - Py_UCS4 __pyx_v_ch; - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - Py_ssize_t __pyx_t_3; - Py_ssize_t __pyx_t_4; - void *__pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - Py_ssize_t __pyx_t_8; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "yarl/_quoting_c.pyx":181 - * cdef Py_UCS4 ch - * - * self._qs = qs # <<<<<<<<<<<<<< - * self._requote = requote - * - */ - __pyx_v_self->_qs = __pyx_v_qs; - - /* "yarl/_quoting_c.pyx":182 - * - * self._qs = qs - * self._requote = requote # <<<<<<<<<<<<<< - * - * if not self._qs: - */ - __pyx_v_self->_requote = __pyx_v_requote; - - /* "yarl/_quoting_c.pyx":184 - * self._requote = requote - * - * if not self._qs: # <<<<<<<<<<<<<< - * memcpy(self._safe_table, - * ALLOWED_NOTQS_TABLE, - */ - __pyx_t_1 = ((!(__pyx_v_self->_qs != 0)) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":185 - * - * if not self._qs: - * memcpy(self._safe_table, # <<<<<<<<<<<<<< - * ALLOWED_NOTQS_TABLE, - * sizeof(self._safe_table)) - */ - (void)(memcpy(__pyx_v_self->_safe_table, __pyx_v_4yarl_10_quoting_c_ALLOWED_NOTQS_TABLE, (sizeof(__pyx_v_self->_safe_table)))); - - /* "yarl/_quoting_c.pyx":184 - * self._requote = requote - * - * if not self._qs: # <<<<<<<<<<<<<< - * memcpy(self._safe_table, - * ALLOWED_NOTQS_TABLE, - */ - goto __pyx_L3; - } - - /* "yarl/_quoting_c.pyx":189 - * sizeof(self._safe_table)) - * else: - * memcpy(self._safe_table, # <<<<<<<<<<<<<< - * ALLOWED_TABLE, - * sizeof(self._safe_table)) - */ - /*else*/ { - - /* "yarl/_quoting_c.pyx":191 - * memcpy(self._safe_table, - * ALLOWED_TABLE, - * sizeof(self._safe_table)) # <<<<<<<<<<<<<< - * for ch in safe: - * if ord(ch) > 127: - */ - (void)(memcpy(__pyx_v_self->_safe_table, __pyx_v_4yarl_10_quoting_c_ALLOWED_TABLE, (sizeof(__pyx_v_self->_safe_table)))); - } - __pyx_L3:; - - /* "yarl/_quoting_c.pyx":192 - * ALLOWED_TABLE, - * sizeof(self._safe_table)) - * for ch in safe: # <<<<<<<<<<<<<< - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - */ - if (unlikely(__pyx_v_safe == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' is not iterable"); - __PYX_ERR(0, 192, __pyx_L1_error) - } - __Pyx_INCREF(__pyx_v_safe); - __pyx_t_2 = __pyx_v_safe; - __pyx_t_7 = __Pyx_init_unicode_iteration(__pyx_t_2, (&__pyx_t_4), (&__pyx_t_5), (&__pyx_t_6)); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 192, __pyx_L1_error) - for (__pyx_t_8 = 0; __pyx_t_8 < __pyx_t_4; __pyx_t_8++) { - __pyx_t_3 = __pyx_t_8; - __pyx_v_ch = __Pyx_PyUnicode_READ(__pyx_t_6, __pyx_t_5, __pyx_t_3); - - /* "yarl/_quoting_c.pyx":193 - * sizeof(self._safe_table)) - * for ch in safe: - * if ord(ch) > 127: # <<<<<<<<<<<<<< - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) - */ - __pyx_t_1 = ((((long)__pyx_v_ch) > 0x7F) != 0); - if (unlikely(__pyx_t_1)) { - 
- /* "yarl/_quoting_c.pyx":194 - * for ch in safe: - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") # <<<<<<<<<<<<<< - * set_bit(self._safe_table, ch) - * - */ - __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 194, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_9); - __Pyx_Raise(__pyx_t_9, 0, 0, 0); - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - __PYX_ERR(0, 194, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":193 - * sizeof(self._safe_table)) - * for ch in safe: - * if ord(ch) > 127: # <<<<<<<<<<<<<< - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) - */ - } - - /* "yarl/_quoting_c.pyx":195 - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) # <<<<<<<<<<<<<< - * - * memset(self._protected_table, 0, sizeof(self._protected_table)) - */ - __pyx_f_4yarl_10_quoting_c_set_bit(__pyx_v_self->_safe_table, __pyx_v_ch); - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "yarl/_quoting_c.pyx":197 - * set_bit(self._safe_table, ch) - * - * memset(self._protected_table, 0, sizeof(self._protected_table)) # <<<<<<<<<<<<<< - * for ch in protected: - * if ord(ch) > 127: - */ - (void)(memset(__pyx_v_self->_protected_table, 0, (sizeof(__pyx_v_self->_protected_table)))); - - /* "yarl/_quoting_c.pyx":198 - * - * memset(self._protected_table, 0, sizeof(self._protected_table)) - * for ch in protected: # <<<<<<<<<<<<<< - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - */ - if (unlikely(__pyx_v_protected == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' is not iterable"); - __PYX_ERR(0, 198, __pyx_L1_error) - } - __Pyx_INCREF(__pyx_v_protected); - __pyx_t_2 = __pyx_v_protected; - __pyx_t_7 = __Pyx_init_unicode_iteration(__pyx_t_2, (&__pyx_t_3), (&__pyx_t_5), (&__pyx_t_6)); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 198, __pyx_L1_error) - for (__pyx_t_8 = 0; __pyx_t_8 < __pyx_t_3; __pyx_t_8++) { - __pyx_t_4 = __pyx_t_8; - __pyx_v_ch = __Pyx_PyUnicode_READ(__pyx_t_6, __pyx_t_5, __pyx_t_4); - - /* "yarl/_quoting_c.pyx":199 - * memset(self._protected_table, 0, sizeof(self._protected_table)) - * for ch in protected: - * if ord(ch) > 127: # <<<<<<<<<<<<<< - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) - */ - __pyx_t_1 = ((((long)__pyx_v_ch) > 0x7F) != 0); - if (unlikely(__pyx_t_1)) { - - /* "yarl/_quoting_c.pyx":200 - * for ch in protected: - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") # <<<<<<<<<<<<<< - * set_bit(self._safe_table, ch) - * set_bit(self._protected_table, ch) - */ - __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 200, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_9); - __Pyx_Raise(__pyx_t_9, 0, 0, 0); - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - __PYX_ERR(0, 200, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":199 - * memset(self._protected_table, 0, sizeof(self._protected_table)) - * for ch in protected: - * if ord(ch) > 127: # <<<<<<<<<<<<<< - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) - */ - } - - /* "yarl/_quoting_c.pyx":201 - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) # <<<<<<<<<<<<<< - * set_bit(self._protected_table, 
ch) - * - */ - __pyx_f_4yarl_10_quoting_c_set_bit(__pyx_v_self->_safe_table, __pyx_v_ch); - - /* "yarl/_quoting_c.pyx":202 - * raise ValueError("Only safe symbols with ORD < 128 are allowed") - * set_bit(self._safe_table, ch) - * set_bit(self._protected_table, ch) # <<<<<<<<<<<<<< - * - * def __call__(self, val): - */ - __pyx_f_4yarl_10_quoting_c_set_bit(__pyx_v_self->_protected_table, __pyx_v_ch); - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "yarl/_quoting_c.pyx":176 - * cdef uint8_t _protected_table[16] - * - * def __init__( # <<<<<<<<<<<<<< - * self, *, str safe='', str protected='', bint qs=False, bint requote=True, - * ): - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("yarl._quoting_c._Quoter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":204 - * set_bit(self._protected_table, ch) - * - * def __call__(self, val): # <<<<<<<<<<<<<< - * cdef Writer writer - * if val is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_7_Quoter_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_4yarl_10_quoting_c_7_Quoter_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_val = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_val,0}; - PyObject* values[1] = {0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_val)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__call__") < 0)) __PYX_ERR(0, 204, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - } - __pyx_v_val = values[0]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__call__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 204, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("yarl._quoting_c._Quoter.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_4yarl_10_quoting_c_7_Quoter_2__call__(((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_v_self), __pyx_v_val); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c_7_Quoter_2__call__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v_val) { - struct __pyx_t_4yarl_10_quoting_c_Writer __pyx_v_writer; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - 
int __pyx_t_5; - char const *__pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - PyObject *__pyx_t_10 = NULL; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__call__", 0); - __Pyx_INCREF(__pyx_v_val); - - /* "yarl/_quoting_c.pyx":206 - * def __call__(self, val): - * cdef Writer writer - * if val is None: # <<<<<<<<<<<<<< - * return None - * if type(val) is not str: - */ - __pyx_t_1 = (__pyx_v_val == Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":207 - * cdef Writer writer - * if val is None: - * return None # <<<<<<<<<<<<<< - * if type(val) is not str: - * if isinstance(val, str): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":206 - * def __call__(self, val): - * cdef Writer writer - * if val is None: # <<<<<<<<<<<<<< - * return None - * if type(val) is not str: - */ - } - - /* "yarl/_quoting_c.pyx":208 - * if val is None: - * return None - * if type(val) is not str: # <<<<<<<<<<<<<< - * if isinstance(val, str): - * # derived from str - */ - __pyx_t_2 = (((PyObject *)Py_TYPE(__pyx_v_val)) != ((PyObject *)(&PyUnicode_Type))); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":209 - * return None - * if type(val) is not str: - * if isinstance(val, str): # <<<<<<<<<<<<<< - * # derived from str - * val = str(val) - */ - __pyx_t_1 = PyUnicode_Check(__pyx_v_val); - __pyx_t_2 = (__pyx_t_1 != 0); - if (likely(__pyx_t_2)) { - - /* "yarl/_quoting_c.pyx":211 - * if isinstance(val, str): - * # derived from str - * val = str(val) # <<<<<<<<<<<<<< - * else: - * raise TypeError("Argument should be str") - */ - __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), __pyx_v_val); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 211, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_val, __pyx_t_3); - __pyx_t_3 = 0; - - /* "yarl/_quoting_c.pyx":209 - * return None - * if type(val) is not str: - * if isinstance(val, str): # <<<<<<<<<<<<<< - * # derived from str - * val = str(val) - */ - goto __pyx_L5; - } - - /* "yarl/_quoting_c.pyx":213 - * val = str(val) - * else: - * raise TypeError("Argument should be str") # <<<<<<<<<<<<<< - * _init_writer(&writer) - * try: - */ - /*else*/ { - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 213, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(0, 213, __pyx_L1_error) - } - __pyx_L5:; - - /* "yarl/_quoting_c.pyx":208 - * if val is None: - * return None - * if type(val) is not str: # <<<<<<<<<<<<<< - * if isinstance(val, str): - * # derived from str - */ - } - - /* "yarl/_quoting_c.pyx":214 - * else: - * raise TypeError("Argument should be str") - * _init_writer(&writer) # <<<<<<<<<<<<<< - * try: - * return self._do_quote(val, &writer) - */ - __pyx_f_4yarl_10_quoting_c__init_writer((&__pyx_v_writer)); - - /* "yarl/_quoting_c.pyx":215 - * raise TypeError("Argument should be str") - * _init_writer(&writer) - * try: # <<<<<<<<<<<<<< - * return self._do_quote(val, &writer) - * finally: - */ - /*try:*/ { - - /* "yarl/_quoting_c.pyx":216 - * _init_writer(&writer) - * try: - * return self._do_quote(val, &writer) # <<<<<<<<<<<<<< - * finally: - * _release_writer(&writer) - */ - 
__Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((struct __pyx_vtabstruct_4yarl_10_quoting_c__Quoter *)__pyx_v_self->__pyx_vtab)->_do_quote(__pyx_v_self, ((PyObject*)__pyx_v_val), (&__pyx_v_writer)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 216, __pyx_L7_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L6_return; - } - - /* "yarl/_quoting_c.pyx":218 - * return self._do_quote(val, &writer) - * finally: - * _release_writer(&writer) # <<<<<<<<<<<<<< - * - * cdef str _do_quote(self, str val, Writer *writer): - */ - /*finally:*/ { - __pyx_L7_error:; - /*exception exit:*/{ - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); - if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9) < 0)) __Pyx_ErrFetch(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_8); - __Pyx_XGOTREF(__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_10); - __Pyx_XGOTREF(__pyx_t_11); - __Pyx_XGOTREF(__pyx_t_12); - __pyx_t_4 = __pyx_lineno; __pyx_t_5 = __pyx_clineno; __pyx_t_6 = __pyx_filename; - { - __pyx_f_4yarl_10_quoting_c__release_writer((&__pyx_v_writer)); - } - if (PY_MAJOR_VERSION >= 3) { - __Pyx_XGIVEREF(__pyx_t_10); - __Pyx_XGIVEREF(__pyx_t_11); - __Pyx_XGIVEREF(__pyx_t_12); - __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); - } - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_ErrRestore(__pyx_t_7, __pyx_t_8, __pyx_t_9); - __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; - __pyx_lineno = __pyx_t_4; __pyx_clineno = __pyx_t_5; __pyx_filename = __pyx_t_6; - goto __pyx_L1_error; - } - __pyx_L6_return: { - __pyx_t_12 = __pyx_r; - __pyx_r = 0; - __pyx_f_4yarl_10_quoting_c__release_writer((&__pyx_v_writer)); - __pyx_r = __pyx_t_12; - __pyx_t_12 = 0; - goto __pyx_L0; - } - } - - /* "yarl/_quoting_c.pyx":204 - * set_bit(self._protected_table, ch) - * - * def __call__(self, val): # <<<<<<<<<<<<<< - * cdef Writer writer - * if val is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("yarl._quoting_c._Quoter.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_val); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":220 - * _release_writer(&writer) - * - * cdef str _do_quote(self, str val, Writer *writer): # <<<<<<<<<<<<<< - * cdef Py_UCS4 ch - * cdef int changed - */ - -static PyObject *__pyx_f_4yarl_10_quoting_c_7_Quoter__do_quote(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v_val, struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer) { - Py_UCS4 __pyx_v_ch; - int __pyx_v_changed; - int __pyx_v_idx; - int __pyx_v_length; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - Py_ssize_t __pyx_t_1; - int __pyx_t_2; - Py_UCS4 __pyx_t_3; - int __pyx_t_4; - long __pyx_t_5; - Py_UCS4 __pyx_t_6; - int __pyx_t_7; - int __pyx_t_8; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_do_quote", 0); - - /* "yarl/_quoting_c.pyx":223 - * cdef Py_UCS4 ch - * cdef int changed - * cdef int idx = 0 # <<<<<<<<<<<<<< - * cdef int 
length = len(val) - * - */ - __pyx_v_idx = 0; - - /* "yarl/_quoting_c.pyx":224 - * cdef int changed - * cdef int idx = 0 - * cdef int length = len(val) # <<<<<<<<<<<<<< - * - * while idx < length: - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 224, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyUnicode_GET_LENGTH(__pyx_v_val); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 224, __pyx_L1_error) - __pyx_v_length = __pyx_t_1; - - /* "yarl/_quoting_c.pyx":226 - * cdef int length = len(val) - * - * while idx < length: # <<<<<<<<<<<<<< - * ch = val[idx] - * idx += 1 - */ - while (1) { - __pyx_t_2 = ((__pyx_v_idx < __pyx_v_length) != 0); - if (!__pyx_t_2) break; - - /* "yarl/_quoting_c.pyx":227 - * - * while idx < length: - * ch = val[idx] # <<<<<<<<<<<<<< - * idx += 1 - * if ch == '%' and self._requote and idx <= length - 2: - */ - __pyx_t_3 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_v_idx, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(__pyx_t_3 == (Py_UCS4)-1)) __PYX_ERR(0, 227, __pyx_L1_error) - __pyx_v_ch = __pyx_t_3; - - /* "yarl/_quoting_c.pyx":228 - * while idx < length: - * ch = val[idx] - * idx += 1 # <<<<<<<<<<<<<< - * if ch == '%' and self._requote and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) - */ - __pyx_v_idx = (__pyx_v_idx + 1); - - /* "yarl/_quoting_c.pyx":229 - * ch = val[idx] - * idx += 1 - * if ch == '%' and self._requote and idx <= length - 2: # <<<<<<<<<<<<<< - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: - */ - __pyx_t_4 = ((__pyx_v_ch == 37) != 0); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L6_bool_binop_done; - } - __pyx_t_4 = (__pyx_v_self->_requote != 0); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L6_bool_binop_done; - } - __pyx_t_4 = ((__pyx_v_idx <= (__pyx_v_length - 2)) != 0); - __pyx_t_2 = __pyx_t_4; - __pyx_L6_bool_binop_done:; - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":230 - * idx += 1 - * if ch == '%' and self._requote and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) # <<<<<<<<<<<<<< - * if ch != -1: - * idx += 2 - */ - __pyx_t_3 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_v_idx, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(__pyx_t_3 == (Py_UCS4)-1)) __PYX_ERR(0, 230, __pyx_L1_error) - __pyx_t_5 = (__pyx_v_idx + 1); - __pyx_t_6 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_t_5, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(__pyx_t_6 == (Py_UCS4)-1)) __PYX_ERR(0, 230, __pyx_L1_error) - __pyx_v_ch = __pyx_f_4yarl_10_quoting_c__restore_ch(__pyx_t_3, __pyx_t_6); - - /* "yarl/_quoting_c.pyx":231 - * if ch == '%' and self._requote and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: # <<<<<<<<<<<<<< - * idx += 2 - * if ch < 128: - */ - __pyx_t_2 = ((__pyx_v_ch != ((Py_UCS4)-1L)) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":232 - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: - * idx += 2 # <<<<<<<<<<<<<< - * if ch < 128: - * if bit_at(self._protected_table, ch): - */ - __pyx_v_idx = (__pyx_v_idx + 2); - - /* "yarl/_quoting_c.pyx":233 - * if ch != -1: - * idx += 2 - * if ch < 128: # <<<<<<<<<<<<<< - * if bit_at(self._protected_table, ch): - * if _write_pct(writer, ch, True) < 0: - */ - __pyx_t_2 = ((__pyx_v_ch < 0x80) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":234 - * idx += 2 - * if ch < 128: - * if bit_at(self._protected_table, ch): # <<<<<<<<<<<<<< - * if 
_write_pct(writer, ch, True) < 0: - * raise - */ - __pyx_t_2 = (__pyx_f_4yarl_10_quoting_c_bit_at(__pyx_v_self->_protected_table, __pyx_v_ch) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":235 - * if ch < 128: - * if bit_at(self._protected_table, ch): - * if _write_pct(writer, ch, True) < 0: # <<<<<<<<<<<<<< - * raise - * continue - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, __pyx_v_ch, 1) < 0) != 0); - if (unlikely(__pyx_t_2)) { - - /* "yarl/_quoting_c.pyx":236 - * if bit_at(self._protected_table, ch): - * if _write_pct(writer, ch, True) < 0: - * raise # <<<<<<<<<<<<<< - * continue - * - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 236, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":235 - * if ch < 128: - * if bit_at(self._protected_table, ch): - * if _write_pct(writer, ch, True) < 0: # <<<<<<<<<<<<<< - * raise - * continue - */ - } - - /* "yarl/_quoting_c.pyx":237 - * if _write_pct(writer, ch, True) < 0: - * raise - * continue # <<<<<<<<<<<<<< - * - * if bit_at(self._safe_table, ch): - */ - goto __pyx_L3_continue; - - /* "yarl/_quoting_c.pyx":234 - * idx += 2 - * if ch < 128: - * if bit_at(self._protected_table, ch): # <<<<<<<<<<<<<< - * if _write_pct(writer, ch, True) < 0: - * raise - */ - } - - /* "yarl/_quoting_c.pyx":239 - * continue - * - * if bit_at(self._safe_table, ch): # <<<<<<<<<<<<<< - * if _write_char(writer, ch, True) < 0: - * raise - */ - __pyx_t_2 = (__pyx_f_4yarl_10_quoting_c_bit_at(__pyx_v_self->_safe_table, __pyx_v_ch) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":240 - * - * if bit_at(self._safe_table, ch): - * if _write_char(writer, ch, True) < 0: # <<<<<<<<<<<<<< - * raise - * continue - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_char(__pyx_v_writer, __pyx_v_ch, 1) < 0) != 0); - if (unlikely(__pyx_t_2)) { - - /* "yarl/_quoting_c.pyx":241 - * if bit_at(self._safe_table, ch): - * if _write_char(writer, ch, True) < 0: - * raise # <<<<<<<<<<<<<< - * continue - * - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 241, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":240 - * - * if bit_at(self._safe_table, ch): - * if _write_char(writer, ch, True) < 0: # <<<<<<<<<<<<<< - * raise - * continue - */ - } - - /* "yarl/_quoting_c.pyx":242 - * if _write_char(writer, ch, True) < 0: - * raise - * continue # <<<<<<<<<<<<<< - * - * changed = (_is_lower_hex(val[idx - 2]) or - */ - goto __pyx_L3_continue; - - /* "yarl/_quoting_c.pyx":239 - * continue - * - * if bit_at(self._safe_table, ch): # <<<<<<<<<<<<<< - * if _write_char(writer, ch, True) < 0: - * raise - */ - } - - /* "yarl/_quoting_c.pyx":233 - * if ch != -1: - * idx += 2 - * if ch < 128: # <<<<<<<<<<<<<< - * if bit_at(self._protected_table, ch): - * if _write_pct(writer, ch, True) < 0: - */ - } - - /* "yarl/_quoting_c.pyx":244 - * continue - * - * changed = (_is_lower_hex(val[idx - 2]) or # <<<<<<<<<<<<<< - * _is_lower_hex(val[idx - 1])) - * if _write_pct(writer, ch, changed) < 0: - */ - __pyx_t_5 = (__pyx_v_idx - 2); - __pyx_t_6 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_t_5, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(__pyx_t_6 == (Py_UCS4)-1)) __PYX_ERR(0, 244, __pyx_L1_error) - __pyx_t_8 = __pyx_f_4yarl_10_quoting_c__is_lower_hex(__pyx_t_6); - if (!__pyx_t_8) { - } else { - __pyx_t_7 = __pyx_t_8; - goto __pyx_L15_bool_binop_done; - } - - /* "yarl/_quoting_c.pyx":245 - * - * changed = (_is_lower_hex(val[idx - 2]) or - * _is_lower_hex(val[idx - 1])) # <<<<<<<<<<<<<< - * if _write_pct(writer, ch, changed) < 0: - * raise - */ - __pyx_t_5 = (__pyx_v_idx - 1); - __pyx_t_6 
= __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_t_5, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(__pyx_t_6 == (Py_UCS4)-1)) __PYX_ERR(0, 245, __pyx_L1_error) - __pyx_t_8 = __pyx_f_4yarl_10_quoting_c__is_lower_hex(__pyx_t_6); - __pyx_t_7 = __pyx_t_8; - __pyx_L15_bool_binop_done:; - __pyx_v_changed = __pyx_t_7; - - /* "yarl/_quoting_c.pyx":246 - * changed = (_is_lower_hex(val[idx - 2]) or - * _is_lower_hex(val[idx - 1])) - * if _write_pct(writer, ch, changed) < 0: # <<<<<<<<<<<<<< - * raise - * continue - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c__write_pct(__pyx_v_writer, __pyx_v_ch, __pyx_v_changed) < 0) != 0); - if (unlikely(__pyx_t_2)) { - - /* "yarl/_quoting_c.pyx":247 - * _is_lower_hex(val[idx - 1])) - * if _write_pct(writer, ch, changed) < 0: - * raise # <<<<<<<<<<<<<< - * continue - * else: - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 247, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":246 - * changed = (_is_lower_hex(val[idx - 2]) or - * _is_lower_hex(val[idx - 1])) - * if _write_pct(writer, ch, changed) < 0: # <<<<<<<<<<<<<< - * raise - * continue - */ - } - - /* "yarl/_quoting_c.pyx":248 - * if _write_pct(writer, ch, changed) < 0: - * raise - * continue # <<<<<<<<<<<<<< - * else: - * ch = '%' - */ - goto __pyx_L3_continue; - - /* "yarl/_quoting_c.pyx":231 - * if ch == '%' and self._requote and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: # <<<<<<<<<<<<<< - * idx += 2 - * if ch < 128: - */ - } - - /* "yarl/_quoting_c.pyx":250 - * continue - * else: - * ch = '%' # <<<<<<<<<<<<<< - * - * if self._write(writer, ch) < 0: - */ - /*else*/ { - __pyx_v_ch = 37; - } - - /* "yarl/_quoting_c.pyx":229 - * ch = val[idx] - * idx += 1 - * if ch == '%' and self._requote and idx <= length - 2: # <<<<<<<<<<<<<< - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: - */ - } - - /* "yarl/_quoting_c.pyx":252 - * ch = '%' - * - * if self._write(writer, ch) < 0: # <<<<<<<<<<<<<< - * raise - * - */ - __pyx_t_2 = ((__pyx_f_4yarl_10_quoting_c_7_Quoter__write(__pyx_v_self, __pyx_v_writer, __pyx_v_ch) < 0) != 0); - if (unlikely(__pyx_t_2)) { - - /* "yarl/_quoting_c.pyx":253 - * - * if self._write(writer, ch) < 0: - * raise # <<<<<<<<<<<<<< - * - * if not writer.changed: - */ - __Pyx_ReraiseException(); __PYX_ERR(0, 253, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":252 - * ch = '%' - * - * if self._write(writer, ch) < 0: # <<<<<<<<<<<<<< - * raise - * - */ - } - __pyx_L3_continue:; - } - - /* "yarl/_quoting_c.pyx":255 - * raise - * - * if not writer.changed: # <<<<<<<<<<<<<< - * return val - * else: - */ - __pyx_t_2 = ((!(__pyx_v_writer->changed != 0)) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":256 - * - * if not writer.changed: - * return val # <<<<<<<<<<<<<< - * else: - * return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict") - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_val); - __pyx_r = __pyx_v_val; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":255 - * raise - * - * if not writer.changed: # <<<<<<<<<<<<<< - * return val - * else: - */ - } - - /* "yarl/_quoting_c.pyx":258 - * return val - * else: - * return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict") # <<<<<<<<<<<<<< - * - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __pyx_t_9 = PyUnicode_DecodeASCII(__pyx_v_writer->buf, __pyx_v_writer->pos, ((char *)"strict")); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 258, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_9); - __pyx_r = ((PyObject*)__pyx_t_9); - __pyx_t_9 = 0; - 
goto __pyx_L0; - } - - /* "yarl/_quoting_c.pyx":220 - * _release_writer(&writer) - * - * cdef str _do_quote(self, str val, Writer *writer): # <<<<<<<<<<<<<< - * cdef Py_UCS4 ch - * cdef int changed - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("yarl._quoting_c._Quoter._do_quote", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":260 - * return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict") - * - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): # <<<<<<<<<<<<<< - * if self._qs: - * if ch == ' ': - */ - -static CYTHON_INLINE int __pyx_f_4yarl_10_quoting_c_7_Quoter__write(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, struct __pyx_t_4yarl_10_quoting_c_Writer *__pyx_v_writer, Py_UCS4 __pyx_v_ch) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - __Pyx_RefNannySetupContext("_write", 0); - - /* "yarl/_quoting_c.pyx":261 - * - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): - * if self._qs: # <<<<<<<<<<<<<< - * if ch == ' ': - * return _write_char(writer, '+', True) - */ - __pyx_t_1 = (__pyx_v_self->_qs != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":262 - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): - * if self._qs: - * if ch == ' ': # <<<<<<<<<<<<<< - * return _write_char(writer, '+', True) - * - */ - __pyx_t_1 = ((__pyx_v_ch == 32) != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":263 - * if self._qs: - * if ch == ' ': - * return _write_char(writer, '+', True) # <<<<<<<<<<<<<< - * - * if ch < 128 and bit_at(self._safe_table, ch): - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_char(__pyx_v_writer, 43, 1); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":262 - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): - * if self._qs: - * if ch == ' ': # <<<<<<<<<<<<<< - * return _write_char(writer, '+', True) - * - */ - } - - /* "yarl/_quoting_c.pyx":261 - * - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): - * if self._qs: # <<<<<<<<<<<<<< - * if ch == ' ': - * return _write_char(writer, '+', True) - */ - } - - /* "yarl/_quoting_c.pyx":265 - * return _write_char(writer, '+', True) - * - * if ch < 128 and bit_at(self._safe_table, ch): # <<<<<<<<<<<<<< - * return _write_char(writer, ch, False) - * - */ - __pyx_t_2 = ((__pyx_v_ch < 0x80) != 0); - if (__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L6_bool_binop_done; - } - __pyx_t_2 = (__pyx_f_4yarl_10_quoting_c_bit_at(__pyx_v_self->_safe_table, __pyx_v_ch) != 0); - __pyx_t_1 = __pyx_t_2; - __pyx_L6_bool_binop_done:; - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":266 - * - * if ch < 128 and bit_at(self._safe_table, ch): - * return _write_char(writer, ch, False) # <<<<<<<<<<<<<< - * - * return _write_utf8(writer, ch) - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_char(__pyx_v_writer, __pyx_v_ch, 0); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":265 - * return _write_char(writer, '+', True) - * - * if ch < 128 and bit_at(self._safe_table, ch): # <<<<<<<<<<<<<< - * return _write_char(writer, ch, False) - * - */ - } - - /* "yarl/_quoting_c.pyx":268 - * return _write_char(writer, ch, False) - * - * return _write_utf8(writer, ch) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = __pyx_f_4yarl_10_quoting_c__write_utf8(__pyx_v_writer, __pyx_v_ch); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":260 - * return PyUnicode_DecodeASCII(writer.buf, 
writer.pos, "strict") - * - * cdef inline int _write(self, Writer *writer, Py_UCS4 ch): # <<<<<<<<<<<<<< - * if self._qs: - * if ch == ' ': - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_7_Quoter_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4yarl_10_quoting_c_7_Quoter_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_4yarl_10_quoting_c_7_Quoter_4__reduce_cython__(((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c_7_Quoter_4__reduce_cython__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_t_7; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self._protected_table, self._qs, self._requote, self._safe_table) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = __Pyx_PyObject_FromCString(__pyx_v_self->_protected_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_self->_qs); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->_requote); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_FromCString(__pyx_v_self->_safe_table); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PyTuple_New(4); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_5, 3, __pyx_t_4); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_v_state = ((PyObject*)__pyx_t_5); - __pyx_t_5 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self._protected_table, self._qs, self._requote, self._safe_table) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_5 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_v__dict = __pyx_t_5; - __pyx_t_5 = 0; - - /* "(tree fragment)":7 - * state = (self._protected_table, self._qs, self._requote, self._safe_table) - * 
_dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_6 = (__pyx_v__dict != Py_None); - __pyx_t_7 = (__pyx_t_6 != 0); - if (__pyx_t_7) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v__dict); - __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); - __pyx_t_4 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = False - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self._protected_table, self._qs, self._requote, self._safe_table) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = False # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, None), state - */ - /*else*/ { - __pyx_v_use_setstate = 0; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = False - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, None), state - * else: - */ - __pyx_t_7 = (__pyx_v_use_setstate != 0); - if (__pyx_t_7) { - - /* "(tree fragment)":13 - * use_setstate = False - * if use_setstate: - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle__Quoter); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_5, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_244432181); - __Pyx_GIVEREF(__pyx_int_244432181); - PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_int_244432181); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_5, 2, Py_None); - __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_5); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_v_state); - __pyx_t_4 = 0; - __pyx_t_5 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = False - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return 
__pyx_unpickle__Quoter, (type(self), 0xe91bd35, None), state - * else: - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle__Quoter__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle__Quoter); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_5, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_244432181); - __Pyx_GIVEREF(__pyx_int_244432181); - PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_int_244432181); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_5); - __pyx_t_3 = 0; - __pyx_t_5 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("yarl._quoting_c._Quoter.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle__Quoter__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_7_Quoter_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_4yarl_10_quoting_c_7_Quoter_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_4yarl_10_quoting_c_7_Quoter_6__setstate_cython__(((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c_7_Quoter_6__setstate_cython__(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":17 - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle__Quoter__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if 
(!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_4yarl_10_quoting_c___pyx_unpickle__Quoter__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle__Quoter, (type(self), 0xe91bd35, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle__Quoter__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("yarl._quoting_c._Quoter.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":277 - * cdef _Quoter _qs_quoter - * - * def __init__(self, *, unsafe='', qs=False): # <<<<<<<<<<<<<< - * self._unsafe = unsafe - * self._qs = qs - */ - -/* Python wrapper */ -static int __pyx_pw_4yarl_10_quoting_c_9_Unquoter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_4yarl_10_quoting_c_9_Unquoter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_unsafe = 0; - PyObject *__pyx_v_qs = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_unsafe,&__pyx_n_s_qs,0}; - PyObject* values[2] = {0,0}; - values[0] = ((PyObject *)__pyx_kp_u_); - values[1] = ((PyObject *)Py_False); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - if (kw_args > 0 && likely(kw_args <= 2)) { - Py_ssize_t index; - for (index = 0; index < 2 && kw_args > 0; index++) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, *__pyx_pyargnames[index]); - if (value) { values[index] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, 0, "__init__") < 0)) __PYX_ERR(0, 277, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 0) { - goto __pyx_L5_argtuple_error; - } else { - } - __pyx_v_unsafe = values[0]; - __pyx_v_qs = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 277, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("yarl._quoting_c._Unquoter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_4yarl_10_quoting_c_9_Unquoter___init__(((struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)__pyx_v_self), __pyx_v_unsafe, __pyx_v_qs); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_4yarl_10_quoting_c_9_Unquoter___init__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter 
*__pyx_v_self, PyObject *__pyx_v_unsafe, PyObject *__pyx_v_qs) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 0); - - /* "yarl/_quoting_c.pyx":278 - * - * def __init__(self, *, unsafe='', qs=False): - * self._unsafe = unsafe # <<<<<<<<<<<<<< - * self._qs = qs - * self._quoter = _Quoter() - */ - if (!(likely(PyUnicode_CheckExact(__pyx_v_unsafe))||((__pyx_v_unsafe) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_v_unsafe)->tp_name), 0))) __PYX_ERR(0, 278, __pyx_L1_error) - __pyx_t_1 = __pyx_v_unsafe; - __Pyx_INCREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_unsafe); - __Pyx_DECREF(__pyx_v_self->_unsafe); - __pyx_v_self->_unsafe = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "yarl/_quoting_c.pyx":279 - * def __init__(self, *, unsafe='', qs=False): - * self._unsafe = unsafe - * self._qs = qs # <<<<<<<<<<<<<< - * self._quoter = _Quoter() - * self._qs_quoter = _Quoter(qs=True) - */ - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_qs); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 279, __pyx_L1_error) - __pyx_v_self->_qs = __pyx_t_2; - - /* "yarl/_quoting_c.pyx":280 - * self._unsafe = unsafe - * self._qs = qs - * self._quoter = _Quoter() # <<<<<<<<<<<<<< - * self._qs_quoter = _Quoter(qs=True) - * - */ - __pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_4yarl_10_quoting_c__Quoter)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 280, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_quoter); - __Pyx_DECREF(((PyObject *)__pyx_v_self->_quoter)); - __pyx_v_self->_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "yarl/_quoting_c.pyx":281 - * self._qs = qs - * self._quoter = _Quoter() - * self._qs_quoter = _Quoter(qs=True) # <<<<<<<<<<<<<< - * - * def __call__(self, val): - */ - __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 281, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_qs, Py_True) < 0) __PYX_ERR(0, 281, __pyx_L1_error) - __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_4yarl_10_quoting_c__Quoter), __pyx_empty_tuple, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 281, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->_qs_quoter); - __Pyx_DECREF(((PyObject *)__pyx_v_self->_qs_quoter)); - __pyx_v_self->_qs_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "yarl/_quoting_c.pyx":277 - * cdef _Quoter _qs_quoter - * - * def __init__(self, *, unsafe='', qs=False): # <<<<<<<<<<<<<< - * self._unsafe = unsafe - * self._qs = qs - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("yarl._quoting_c._Unquoter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":283 - * self._qs_quoter = _Quoter(qs=True) - * - * def __call__(self, val): # <<<<<<<<<<<<<< - * if val is None: - * return None - */ - -/* Python wrapper */ -static PyObject 
*__pyx_pw_4yarl_10_quoting_c_9_Unquoter_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_4yarl_10_quoting_c_9_Unquoter_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_val = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_val,0}; - PyObject* values[1] = {0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_val)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__call__") < 0)) __PYX_ERR(0, 283, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - } - __pyx_v_val = values[0]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__call__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 283, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("yarl._quoting_c._Unquoter.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_4yarl_10_quoting_c_9_Unquoter_2__call__(((struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)__pyx_v_self), __pyx_v_val); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c_9_Unquoter_2__call__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v_val) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__call__", 0); - __Pyx_INCREF(__pyx_v_val); - - /* "yarl/_quoting_c.pyx":284 - * - * def __call__(self, val): - * if val is None: # <<<<<<<<<<<<<< - * return None - * if type(val) is not str: - */ - __pyx_t_1 = (__pyx_v_val == Py_None); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":285 - * def __call__(self, val): - * if val is None: - * return None # <<<<<<<<<<<<<< - * if type(val) is not str: - * if isinstance(val, str): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":284 - * - * def __call__(self, val): - * if val is None: # <<<<<<<<<<<<<< - * return None - * if type(val) is not str: - */ - } - - /* "yarl/_quoting_c.pyx":286 - * if val is None: - * return None - * if type(val) is not str: # <<<<<<<<<<<<<< - * if isinstance(val, str): - * # derived from str - */ - __pyx_t_2 = (((PyObject *)Py_TYPE(__pyx_v_val)) != ((PyObject *)(&PyUnicode_Type))); - __pyx_t_1 = (__pyx_t_2 != 0); - if (__pyx_t_1) { - - /* "yarl/_quoting_c.pyx":287 - * return None - * if type(val) is not str: - * if isinstance(val, str): 
# <<<<<<<<<<<<<< - * # derived from str - * val = str(val) - */ - __pyx_t_1 = PyUnicode_Check(__pyx_v_val); - __pyx_t_2 = (__pyx_t_1 != 0); - if (likely(__pyx_t_2)) { - - /* "yarl/_quoting_c.pyx":289 - * if isinstance(val, str): - * # derived from str - * val = str(val) # <<<<<<<<<<<<<< - * else: - * raise TypeError("Argument should be str") - */ - __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), __pyx_v_val); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 289, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF_SET(__pyx_v_val, __pyx_t_3); - __pyx_t_3 = 0; - - /* "yarl/_quoting_c.pyx":287 - * return None - * if type(val) is not str: - * if isinstance(val, str): # <<<<<<<<<<<<<< - * # derived from str - * val = str(val) - */ - goto __pyx_L5; - } - - /* "yarl/_quoting_c.pyx":291 - * val = str(val) - * else: - * raise TypeError("Argument should be str") # <<<<<<<<<<<<<< - * return self._do_unquote(val) - * - */ - /*else*/ { - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(0, 291, __pyx_L1_error) - } - __pyx_L5:; - - /* "yarl/_quoting_c.pyx":286 - * if val is None: - * return None - * if type(val) is not str: # <<<<<<<<<<<<<< - * if isinstance(val, str): - * # derived from str - */ - } - - /* "yarl/_quoting_c.pyx":292 - * else: - * raise TypeError("Argument should be str") - * return self._do_unquote(val) # <<<<<<<<<<<<<< - * - * cdef str _do_unquote(self, str val): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((struct __pyx_vtabstruct_4yarl_10_quoting_c__Unquoter *)__pyx_v_self->__pyx_vtab)->_do_unquote(__pyx_v_self, ((PyObject*)__pyx_v_val)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 292, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":283 - * self._qs_quoter = _Quoter(qs=True) - * - * def __call__(self, val): # <<<<<<<<<<<<<< - * if val is None: - * return None - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("yarl._quoting_c._Unquoter.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_val); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "yarl/_quoting_c.pyx":294 - * return self._do_unquote(val) - * - * cdef str _do_unquote(self, str val): # <<<<<<<<<<<<<< - * if len(val) == 0: - * return val - */ - -static PyObject *__pyx_f_4yarl_10_quoting_c_9_Unquoter__do_unquote(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v_val) { - PyObject *__pyx_v_ret = 0; - char __pyx_v_buffer[4]; - Py_ssize_t __pyx_v_buflen; - Py_ssize_t __pyx_v_consumed; - PyObject *__pyx_v_unquoted = 0; - Py_UCS4 __pyx_v_ch; - Py_ssize_t __pyx_v_idx; - Py_ssize_t __pyx_v_length; - Py_ssize_t __pyx_v_start_pct; - PyObject *__pyx_v_h = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - Py_ssize_t __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - Py_UCS4 __pyx_t_4; - int __pyx_t_5; - Py_UCS4 __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - int __pyx_t_10; - PyObject *__pyx_t_11 = NULL; - PyObject *__pyx_t_12 = NULL; - PyObject *__pyx_t_13 = NULL; - int __pyx_t_14; - PyObject *__pyx_t_15 = NULL; - PyObject *__pyx_t_16 = NULL; - PyObject *__pyx_t_17 = NULL; - PyObject 
*__pyx_t_18 = NULL; - PyObject *__pyx_t_19 = NULL; - PyObject *__pyx_t_20 = NULL; - int __pyx_t_21; - PyObject *(*__pyx_t_22)(PyObject *); - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_do_unquote", 0); - - /* "yarl/_quoting_c.pyx":295 - * - * cdef str _do_unquote(self, str val): - * if len(val) == 0: # <<<<<<<<<<<<<< - * return val - * cdef list ret = [] - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 295, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyUnicode_GET_LENGTH(__pyx_v_val); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 295, __pyx_L1_error) - __pyx_t_2 = ((__pyx_t_1 == 0) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":296 - * cdef str _do_unquote(self, str val): - * if len(val) == 0: - * return val # <<<<<<<<<<<<<< - * cdef list ret = [] - * cdef char buffer[4] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_val); - __pyx_r = __pyx_v_val; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":295 - * - * cdef str _do_unquote(self, str val): - * if len(val) == 0: # <<<<<<<<<<<<<< - * return val - * cdef list ret = [] - */ - } - - /* "yarl/_quoting_c.pyx":297 - * if len(val) == 0: - * return val - * cdef list ret = [] # <<<<<<<<<<<<<< - * cdef char buffer[4] - * cdef Py_ssize_t buflen = 0 - */ - __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_ret = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "yarl/_quoting_c.pyx":299 - * cdef list ret = [] - * cdef char buffer[4] - * cdef Py_ssize_t buflen = 0 # <<<<<<<<<<<<<< - * cdef Py_ssize_t consumed - * cdef str unquoted - */ - __pyx_v_buflen = 0; - - /* "yarl/_quoting_c.pyx":302 - * cdef Py_ssize_t consumed - * cdef str unquoted - * cdef Py_UCS4 ch = 0 # <<<<<<<<<<<<<< - * cdef Py_ssize_t idx = 0 - * cdef Py_ssize_t length = len(val) - */ - __pyx_v_ch = 0; - - /* "yarl/_quoting_c.pyx":303 - * cdef str unquoted - * cdef Py_UCS4 ch = 0 - * cdef Py_ssize_t idx = 0 # <<<<<<<<<<<<<< - * cdef Py_ssize_t length = len(val) - * cdef Py_ssize_t start_pct - */ - __pyx_v_idx = 0; - - /* "yarl/_quoting_c.pyx":304 - * cdef Py_UCS4 ch = 0 - * cdef Py_ssize_t idx = 0 - * cdef Py_ssize_t length = len(val) # <<<<<<<<<<<<<< - * cdef Py_ssize_t start_pct - * - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 304, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyUnicode_GET_LENGTH(__pyx_v_val); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 304, __pyx_L1_error) - __pyx_v_length = __pyx_t_1; - - /* "yarl/_quoting_c.pyx":307 - * cdef Py_ssize_t start_pct - * - * while idx < length: # <<<<<<<<<<<<<< - * ch = val[idx] - * idx += 1 - */ - while (1) { - __pyx_t_2 = ((__pyx_v_idx < __pyx_v_length) != 0); - if (!__pyx_t_2) break; - - /* "yarl/_quoting_c.pyx":308 - * - * while idx < length: - * ch = val[idx] # <<<<<<<<<<<<<< - * idx += 1 - * if ch == '%' and idx <= length - 2: - */ - __pyx_t_4 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_v_idx, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(__pyx_t_4 == (Py_UCS4)-1)) __PYX_ERR(0, 308, __pyx_L1_error) - __pyx_v_ch = __pyx_t_4; - - /* "yarl/_quoting_c.pyx":309 - * while idx < length: - * ch = val[idx] - * idx += 1 # <<<<<<<<<<<<<< - * if ch == '%' and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) - */ - __pyx_v_idx = (__pyx_v_idx + 1); - - /* 
"yarl/_quoting_c.pyx":310 - * ch = val[idx] - * idx += 1 - * if ch == '%' and idx <= length - 2: # <<<<<<<<<<<<<< - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: - */ - __pyx_t_5 = ((__pyx_v_ch == 37) != 0); - if (__pyx_t_5) { - } else { - __pyx_t_2 = __pyx_t_5; - goto __pyx_L7_bool_binop_done; - } - __pyx_t_5 = ((__pyx_v_idx <= (__pyx_v_length - 2)) != 0); - __pyx_t_2 = __pyx_t_5; - __pyx_L7_bool_binop_done:; - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":311 - * idx += 1 - * if ch == '%' and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) # <<<<<<<<<<<<<< - * if ch != -1: - * idx += 2 - */ - __pyx_t_4 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_v_idx, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(__pyx_t_4 == (Py_UCS4)-1)) __PYX_ERR(0, 311, __pyx_L1_error) - __pyx_t_1 = (__pyx_v_idx + 1); - __pyx_t_6 = __Pyx_GetItemInt_Unicode(__pyx_v_val, __pyx_t_1, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(__pyx_t_6 == (Py_UCS4)-1)) __PYX_ERR(0, 311, __pyx_L1_error) - __pyx_v_ch = __pyx_f_4yarl_10_quoting_c__restore_ch(__pyx_t_4, __pyx_t_6); - - /* "yarl/_quoting_c.pyx":312 - * if ch == '%' and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: # <<<<<<<<<<<<<< - * idx += 2 - * assert buflen < 4 - */ - __pyx_t_2 = ((__pyx_v_ch != ((Py_UCS4)-1L)) != 0); - if (__pyx_t_2) { - - /* "yarl/_quoting_c.pyx":313 - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: - * idx += 2 # <<<<<<<<<<<<<< - * assert buflen < 4 - * buffer[buflen] = ch - */ - __pyx_v_idx = (__pyx_v_idx + 2); - - /* "yarl/_quoting_c.pyx":314 - * if ch != -1: - * idx += 2 - * assert buflen < 4 # <<<<<<<<<<<<<< - * buffer[buflen] = ch - * buflen += 1 - */ - #ifndef CYTHON_WITHOUT_ASSERTIONS - if (unlikely(!Py_OptimizeFlag)) { - if (unlikely(!((__pyx_v_buflen < 4) != 0))) { - PyErr_SetNone(PyExc_AssertionError); - __PYX_ERR(0, 314, __pyx_L1_error) - } - } - #endif - - /* "yarl/_quoting_c.pyx":315 - * idx += 2 - * assert buflen < 4 - * buffer[buflen] = ch # <<<<<<<<<<<<<< - * buflen += 1 - * try: - */ - (__pyx_v_buffer[__pyx_v_buflen]) = __pyx_v_ch; - - /* "yarl/_quoting_c.pyx":316 - * assert buflen < 4 - * buffer[buflen] = ch - * buflen += 1 # <<<<<<<<<<<<<< - * try: - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - */ - __pyx_v_buflen = (__pyx_v_buflen + 1); - - /* "yarl/_quoting_c.pyx":317 - * buffer[buflen] = ch - * buflen += 1 - * try: # <<<<<<<<<<<<<< - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); - __Pyx_XGOTREF(__pyx_t_7); - __Pyx_XGOTREF(__pyx_t_8); - __Pyx_XGOTREF(__pyx_t_9); - /*try:*/ { - - /* "yarl/_quoting_c.pyx":318 - * buflen += 1 - * try: - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, # <<<<<<<<<<<<<< - * NULL, &consumed) - * except UnicodeDecodeError: - */ - __pyx_t_3 = PyUnicode_DecodeUTF8Stateful(__pyx_v_buffer, __pyx_v_buflen, NULL, (&__pyx_v_consumed)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 318, __pyx_L10_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_XDECREF_SET(__pyx_v_unquoted, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "yarl/_quoting_c.pyx":317 - * buffer[buflen] = ch - * buflen += 1 - * try: # <<<<<<<<<<<<<< - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - */ - } - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - goto 
__pyx_L17_try_end; - __pyx_L10_error:; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "yarl/_quoting_c.pyx":320 - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - * except UnicodeDecodeError: # <<<<<<<<<<<<<< - * start_pct = idx - buflen * 3 - * buffer[0] = ch - */ - __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_UnicodeDecodeError); - if (__pyx_t_10) { - __Pyx_AddTraceback("yarl._quoting_c._Unquoter._do_unquote", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_3, &__pyx_t_11, &__pyx_t_12) < 0) __PYX_ERR(0, 320, __pyx_L12_except_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_t_11); - __Pyx_GOTREF(__pyx_t_12); - - /* "yarl/_quoting_c.pyx":321 - * NULL, &consumed) - * except UnicodeDecodeError: - * start_pct = idx - buflen * 3 # <<<<<<<<<<<<<< - * buffer[0] = ch - * buflen = 1 - */ - __pyx_v_start_pct = (__pyx_v_idx - (__pyx_v_buflen * 3)); - - /* "yarl/_quoting_c.pyx":322 - * except UnicodeDecodeError: - * start_pct = idx - buflen * 3 - * buffer[0] = ch # <<<<<<<<<<<<<< - * buflen = 1 - * ret.append(val[start_pct : idx - 3]) - */ - (__pyx_v_buffer[0]) = __pyx_v_ch; - - /* "yarl/_quoting_c.pyx":323 - * start_pct = idx - buflen * 3 - * buffer[0] = ch - * buflen = 1 # <<<<<<<<<<<<<< - * ret.append(val[start_pct : idx - 3]) - * try: - */ - __pyx_v_buflen = 1; - - /* "yarl/_quoting_c.pyx":324 - * buffer[0] = ch - * buflen = 1 - * ret.append(val[start_pct : idx - 3]) # <<<<<<<<<<<<<< - * try: - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 324, __pyx_L12_except_error) - } - __pyx_t_13 = __Pyx_PyUnicode_Substring(__pyx_v_val, __pyx_v_start_pct, (__pyx_v_idx - 3)); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 324, __pyx_L12_except_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_13); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 324, __pyx_L12_except_error) - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - - /* "yarl/_quoting_c.pyx":325 - * buflen = 1 - * ret.append(val[start_pct : idx - 3]) - * try: # <<<<<<<<<<<<<< - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); - __Pyx_XGOTREF(__pyx_t_15); - __Pyx_XGOTREF(__pyx_t_16); - __Pyx_XGOTREF(__pyx_t_17); - /*try:*/ { - - /* "yarl/_quoting_c.pyx":326 - * ret.append(val[start_pct : idx - 3]) - * try: - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, # <<<<<<<<<<<<<< - * NULL, &consumed) - * except UnicodeDecodeError: - */ - __pyx_t_13 = PyUnicode_DecodeUTF8Stateful(__pyx_v_buffer, __pyx_v_buflen, NULL, (&__pyx_v_consumed)); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 326, __pyx_L20_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_XDECREF_SET(__pyx_v_unquoted, ((PyObject*)__pyx_t_13)); - __pyx_t_13 = 0; - - /* "yarl/_quoting_c.pyx":325 - * buflen = 1 - * ret.append(val[start_pct : idx - 3]) - * try: # <<<<<<<<<<<<<< - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - */ - } - __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; - __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; - __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; - goto __pyx_L27_try_end; - __pyx_L20_error:; - __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; - - /* "yarl/_quoting_c.pyx":328 - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, 
&consumed) - * except UnicodeDecodeError: # <<<<<<<<<<<<<< - * buflen = 0 - * ret.append(val[idx - 3 : idx]) - */ - __pyx_t_10 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_UnicodeDecodeError); - if (__pyx_t_10) { - __Pyx_AddTraceback("yarl._quoting_c._Unquoter._do_unquote", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_13, &__pyx_t_18, &__pyx_t_19) < 0) __PYX_ERR(0, 328, __pyx_L22_except_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_GOTREF(__pyx_t_18); - __Pyx_GOTREF(__pyx_t_19); - - /* "yarl/_quoting_c.pyx":329 - * NULL, &consumed) - * except UnicodeDecodeError: - * buflen = 0 # <<<<<<<<<<<<<< - * ret.append(val[idx - 3 : idx]) - * continue - */ - __pyx_v_buflen = 0; - - /* "yarl/_quoting_c.pyx":330 - * except UnicodeDecodeError: - * buflen = 0 - * ret.append(val[idx - 3 : idx]) # <<<<<<<<<<<<<< - * continue - * if not unquoted: - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 330, __pyx_L22_except_error) - } - __pyx_t_20 = __Pyx_PyUnicode_Substring(__pyx_v_val, (__pyx_v_idx - 3), __pyx_v_idx); if (unlikely(!__pyx_t_20)) __PYX_ERR(0, 330, __pyx_L22_except_error) - __Pyx_GOTREF(__pyx_t_20); - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_20); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 330, __pyx_L22_except_error) - __Pyx_DECREF(__pyx_t_20); __pyx_t_20 = 0; - - /* "yarl/_quoting_c.pyx":331 - * buflen = 0 - * ret.append(val[idx - 3 : idx]) - * continue # <<<<<<<<<<<<<< - * if not unquoted: - * assert consumed == 0 - */ - goto __pyx_L29_except_continue; - __pyx_L29_except_continue:; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_DECREF(__pyx_t_18); __pyx_t_18 = 0; - __Pyx_DECREF(__pyx_t_19); __pyx_t_19 = 0; - goto __pyx_L26_try_continue; - } - goto __pyx_L22_except_error; - __pyx_L22_except_error:; - - /* "yarl/_quoting_c.pyx":325 - * buflen = 1 - * ret.append(val[start_pct : idx - 3]) - * try: # <<<<<<<<<<<<<< - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - */ - __Pyx_XGIVEREF(__pyx_t_15); - __Pyx_XGIVEREF(__pyx_t_16); - __Pyx_XGIVEREF(__pyx_t_17); - __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); - goto __pyx_L12_except_error; - __pyx_L26_try_continue:; - __Pyx_XGIVEREF(__pyx_t_15); - __Pyx_XGIVEREF(__pyx_t_16); - __Pyx_XGIVEREF(__pyx_t_17); - __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); - goto __pyx_L19_except_continue; - __pyx_L27_try_end:; - } - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; - goto __pyx_L11_exception_handled; - __pyx_L19_except_continue:; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - goto __pyx_L16_try_continue; - } - goto __pyx_L12_except_error; - __pyx_L12_except_error:; - - /* "yarl/_quoting_c.pyx":317 - * buffer[buflen] = ch - * buflen += 1 - * try: # <<<<<<<<<<<<<< - * unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - * NULL, &consumed) - */ - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); - goto __pyx_L1_error; - __pyx_L16_try_continue:; - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - __Pyx_XGIVEREF(__pyx_t_9); - __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); - goto __pyx_L4_continue; - __pyx_L11_exception_handled:; - __Pyx_XGIVEREF(__pyx_t_7); - __Pyx_XGIVEREF(__pyx_t_8); - 
__Pyx_XGIVEREF(__pyx_t_9); - __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); - __pyx_L17_try_end:; - } - - /* "yarl/_quoting_c.pyx":332 - * ret.append(val[idx - 3 : idx]) - * continue - * if not unquoted: # <<<<<<<<<<<<<< - * assert consumed == 0 - * continue - */ - __pyx_t_2 = (__pyx_v_unquoted != Py_None)&&(__Pyx_PyUnicode_IS_TRUE(__pyx_v_unquoted) != 0); - __pyx_t_5 = ((!__pyx_t_2) != 0); - if (__pyx_t_5) { - - /* "yarl/_quoting_c.pyx":333 - * continue - * if not unquoted: - * assert consumed == 0 # <<<<<<<<<<<<<< - * continue - * assert consumed == buflen - */ - #ifndef CYTHON_WITHOUT_ASSERTIONS - if (unlikely(!Py_OptimizeFlag)) { - if (unlikely(!((__pyx_v_consumed == 0) != 0))) { - PyErr_SetNone(PyExc_AssertionError); - __PYX_ERR(0, 333, __pyx_L1_error) - } - } - #endif - - /* "yarl/_quoting_c.pyx":334 - * if not unquoted: - * assert consumed == 0 - * continue # <<<<<<<<<<<<<< - * assert consumed == buflen - * buflen = 0 - */ - goto __pyx_L4_continue; - - /* "yarl/_quoting_c.pyx":332 - * ret.append(val[idx - 3 : idx]) - * continue - * if not unquoted: # <<<<<<<<<<<<<< - * assert consumed == 0 - * continue - */ - } - - /* "yarl/_quoting_c.pyx":335 - * assert consumed == 0 - * continue - * assert consumed == buflen # <<<<<<<<<<<<<< - * buflen = 0 - * if self._qs and unquoted in '+=&;': - */ - #ifndef CYTHON_WITHOUT_ASSERTIONS - if (unlikely(!Py_OptimizeFlag)) { - if (unlikely(!((__pyx_v_consumed == __pyx_v_buflen) != 0))) { - PyErr_SetNone(PyExc_AssertionError); - __PYX_ERR(0, 335, __pyx_L1_error) - } - } - #endif - - /* "yarl/_quoting_c.pyx":336 - * continue - * assert consumed == buflen - * buflen = 0 # <<<<<<<<<<<<<< - * if self._qs and unquoted in '+=&;': - * ret.append(self._qs_quoter(unquoted)) - */ - __pyx_v_buflen = 0; - - /* "yarl/_quoting_c.pyx":337 - * assert consumed == buflen - * buflen = 0 - * if self._qs and unquoted in '+=&;': # <<<<<<<<<<<<<< - * ret.append(self._qs_quoter(unquoted)) - * elif unquoted in self._unsafe: - */ - __pyx_t_2 = (__pyx_v_self->_qs != 0); - if (__pyx_t_2) { - } else { - __pyx_t_5 = __pyx_t_2; - goto __pyx_L32_bool_binop_done; - } - __pyx_t_2 = (__Pyx_PyUnicode_ContainsTF(__pyx_v_unquoted, __pyx_kp_u__4, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 337, __pyx_L1_error) - __pyx_t_21 = (__pyx_t_2 != 0); - __pyx_t_5 = __pyx_t_21; - __pyx_L32_bool_binop_done:; - if (__pyx_t_5) { - - /* "yarl/_quoting_c.pyx":338 - * buflen = 0 - * if self._qs and unquoted in '+=&;': - * ret.append(self._qs_quoter(unquoted)) # <<<<<<<<<<<<<< - * elif unquoted in self._unsafe: - * ret.append(self._quoter(unquoted)) - */ - __Pyx_INCREF(((PyObject *)__pyx_v_self->_qs_quoter)); - __pyx_t_11 = ((PyObject *)__pyx_v_self->_qs_quoter); __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_11))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_11); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_11); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_11, function); - } - } - __pyx_t_12 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_11, __pyx_t_3, __pyx_v_unquoted) : __Pyx_PyObject_CallOneArg(__pyx_t_11, __pyx_v_unquoted); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 338, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_12); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 338, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - - /* "yarl/_quoting_c.pyx":337 - * assert consumed == buflen - * buflen = 0 - * if self._qs and unquoted in '+=&;': # <<<<<<<<<<<<<< - * ret.append(self._qs_quoter(unquoted)) - * elif unquoted in self._unsafe: - */ - goto __pyx_L31; - } - - /* "yarl/_quoting_c.pyx":339 - * if self._qs and unquoted in '+=&;': - * ret.append(self._qs_quoter(unquoted)) - * elif unquoted in self._unsafe: # <<<<<<<<<<<<<< - * ret.append(self._quoter(unquoted)) - * else: - */ - if (unlikely(__pyx_v_self->_unsafe == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 339, __pyx_L1_error) - } - __pyx_t_5 = (__Pyx_PyUnicode_ContainsTF(__pyx_v_unquoted, __pyx_v_self->_unsafe, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 339, __pyx_L1_error) - __pyx_t_21 = (__pyx_t_5 != 0); - if (__pyx_t_21) { - - /* "yarl/_quoting_c.pyx":340 - * ret.append(self._qs_quoter(unquoted)) - * elif unquoted in self._unsafe: - * ret.append(self._quoter(unquoted)) # <<<<<<<<<<<<<< - * else: - * ret.append(unquoted) - */ - __Pyx_INCREF(((PyObject *)__pyx_v_self->_quoter)); - __pyx_t_11 = ((PyObject *)__pyx_v_self->_quoter); __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_11))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_11); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_11); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_11, function); - } - } - __pyx_t_12 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_11, __pyx_t_3, __pyx_v_unquoted) : __Pyx_PyObject_CallOneArg(__pyx_t_11, __pyx_v_unquoted); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_12); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - - /* "yarl/_quoting_c.pyx":339 - * if self._qs and unquoted in '+=&;': - * ret.append(self._qs_quoter(unquoted)) - * elif unquoted in self._unsafe: # <<<<<<<<<<<<<< - * ret.append(self._quoter(unquoted)) - * else: - */ - goto __pyx_L31; - } - - /* "yarl/_quoting_c.pyx":342 - * ret.append(self._quoter(unquoted)) - * else: - * ret.append(unquoted) # <<<<<<<<<<<<<< - * continue - * else: - */ - /*else*/ { - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_v_unquoted); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 342, __pyx_L1_error) - } - __pyx_L31:; - - /* "yarl/_quoting_c.pyx":343 - * else: - * ret.append(unquoted) - * continue # <<<<<<<<<<<<<< - * else: - * ch = '%' - */ - goto __pyx_L4_continue; - - /* "yarl/_quoting_c.pyx":312 - * if ch == '%' and idx <= length - 2: - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: # <<<<<<<<<<<<<< - * idx += 2 - * assert buflen < 4 - */ - } - - /* "yarl/_quoting_c.pyx":345 - * continue - * else: - * ch = '%' # <<<<<<<<<<<<<< - * - * if buflen: - */ - /*else*/ { - __pyx_v_ch = 37; - } - - /* "yarl/_quoting_c.pyx":310 - * ch = val[idx] - * idx += 1 - * if ch == '%' and idx <= length - 2: # <<<<<<<<<<<<<< - * ch = _restore_ch(val[idx], val[idx + 1]) - * if ch != -1: - */ - } - - /* "yarl/_quoting_c.pyx":347 - * ch = '%' - * - * if buflen: # <<<<<<<<<<<<<< - * start_pct = idx - 1 - buflen * 3 - * ret.append(val[start_pct : idx - 1]) - */ - __pyx_t_21 = (__pyx_v_buflen != 0); - if (__pyx_t_21) { - - /* "yarl/_quoting_c.pyx":348 - * - * if buflen: - * start_pct = idx - 1 - buflen * 3 # <<<<<<<<<<<<<< - * ret.append(val[start_pct : idx - 1]) - * buflen = 0 - */ - __pyx_v_start_pct = ((__pyx_v_idx - 1) - (__pyx_v_buflen * 3)); - - /* "yarl/_quoting_c.pyx":349 - * if buflen: - * start_pct = idx - 1 - buflen * 3 - * ret.append(val[start_pct : idx - 1]) # <<<<<<<<<<<<<< - * buflen = 0 - * - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 349, __pyx_L1_error) - } - __pyx_t_12 = __Pyx_PyUnicode_Substring(__pyx_v_val, __pyx_v_start_pct, (__pyx_v_idx - 1)); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 349, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_12); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 349, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - - /* "yarl/_quoting_c.pyx":350 - * start_pct = idx - 1 - buflen * 3 - * ret.append(val[start_pct : idx - 1]) - * buflen = 0 # <<<<<<<<<<<<<< - * - * if ch == '+': - */ - __pyx_v_buflen = 0; - - /* "yarl/_quoting_c.pyx":347 - * ch = '%' - * - * if buflen: # <<<<<<<<<<<<<< - * start_pct = idx - 1 - buflen * 3 - * ret.append(val[start_pct : idx - 1]) - */ - } - - /* "yarl/_quoting_c.pyx":352 - * buflen = 0 - * - * if ch == '+': # <<<<<<<<<<<<<< - * if not self._qs or ch in self._unsafe: - * ret.append('+') - */ - __pyx_t_21 = ((__pyx_v_ch == 43) != 0); - if (__pyx_t_21) { - - /* "yarl/_quoting_c.pyx":353 - * - * if ch == '+': - * if not self._qs or ch in self._unsafe: # 
<<<<<<<<<<<<<< - * ret.append('+') - * else: - */ - __pyx_t_5 = ((!(__pyx_v_self->_qs != 0)) != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_21 = __pyx_t_5; - goto __pyx_L37_bool_binop_done; - } - if (unlikely(__pyx_v_self->_unsafe == Py_None)) { - PyErr_SetString(PyExc_TypeError, "argument of type 'NoneType' is not iterable"); - __PYX_ERR(0, 353, __pyx_L1_error) - } - __pyx_t_5 = ((__Pyx_UnicodeContainsUCS4(__pyx_v_self->_unsafe, __pyx_v_ch)) != 0); - __pyx_t_21 = __pyx_t_5; - __pyx_L37_bool_binop_done:; - if (__pyx_t_21) { - - /* "yarl/_quoting_c.pyx":354 - * if ch == '+': - * if not self._qs or ch in self._unsafe: - * ret.append('+') # <<<<<<<<<<<<<< - * else: - * ret.append(' ') - */ - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_kp_u__5); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 354, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":353 - * - * if ch == '+': - * if not self._qs or ch in self._unsafe: # <<<<<<<<<<<<<< - * ret.append('+') - * else: - */ - goto __pyx_L36; - } - - /* "yarl/_quoting_c.pyx":356 - * ret.append('+') - * else: - * ret.append(' ') # <<<<<<<<<<<<<< - * continue - * - */ - /*else*/ { - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_kp_u__6); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 356, __pyx_L1_error) - } - __pyx_L36:; - - /* "yarl/_quoting_c.pyx":357 - * else: - * ret.append(' ') - * continue # <<<<<<<<<<<<<< - * - * if ch in self._unsafe: - */ - goto __pyx_L4_continue; - - /* "yarl/_quoting_c.pyx":352 - * buflen = 0 - * - * if ch == '+': # <<<<<<<<<<<<<< - * if not self._qs or ch in self._unsafe: - * ret.append('+') - */ - } - - /* "yarl/_quoting_c.pyx":359 - * continue - * - * if ch in self._unsafe: # <<<<<<<<<<<<<< - * ret.append('%') - * h = hex(ord(ch)).upper()[2:] - */ - if (unlikely(__pyx_v_self->_unsafe == Py_None)) { - PyErr_SetString(PyExc_TypeError, "argument of type 'NoneType' is not iterable"); - __PYX_ERR(0, 359, __pyx_L1_error) - } - __pyx_t_21 = ((__Pyx_UnicodeContainsUCS4(__pyx_v_self->_unsafe, __pyx_v_ch)) != 0); - if (__pyx_t_21) { - - /* "yarl/_quoting_c.pyx":360 - * - * if ch in self._unsafe: - * ret.append('%') # <<<<<<<<<<<<<< - * h = hex(ord(ch)).upper()[2:] - * for ch in h: - */ - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_kp_u__7); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 360, __pyx_L1_error) - - /* "yarl/_quoting_c.pyx":361 - * if ch in self._unsafe: - * ret.append('%') - * h = hex(ord(ch)).upper()[2:] # <<<<<<<<<<<<<< - * for ch in h: - * ret.append(ch) - */ - __pyx_t_11 = __Pyx_PyInt_From_long(((long)__pyx_v_ch)); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 361, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_hex, __pyx_t_11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 361, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_upper); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 361, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_11))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_11); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_11); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_11, function); - } - } - __pyx_t_12 = (__pyx_t_3) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_11, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_11); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 361, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __pyx_t_11 = __Pyx_PyObject_GetSlice(__pyx_t_12, 2, 0, NULL, NULL, &__pyx_slice__8, 1, 0, 1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 361, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_XDECREF_SET(__pyx_v_h, __pyx_t_11); - __pyx_t_11 = 0; - - /* "yarl/_quoting_c.pyx":362 - * ret.append('%') - * h = hex(ord(ch)).upper()[2:] - * for ch in h: # <<<<<<<<<<<<<< - * ret.append(ch) - * continue - */ - if (likely(PyList_CheckExact(__pyx_v_h)) || PyTuple_CheckExact(__pyx_v_h)) { - __pyx_t_11 = __pyx_v_h; __Pyx_INCREF(__pyx_t_11); __pyx_t_1 = 0; - __pyx_t_22 = NULL; - } else { - __pyx_t_1 = -1; __pyx_t_11 = PyObject_GetIter(__pyx_v_h); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __pyx_t_22 = Py_TYPE(__pyx_t_11)->tp_iternext; if (unlikely(!__pyx_t_22)) __PYX_ERR(0, 362, __pyx_L1_error) - } - for (;;) { - if (likely(!__pyx_t_22)) { - if (likely(PyList_CheckExact(__pyx_t_11))) { - if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_11)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_12 = PyList_GET_ITEM(__pyx_t_11, __pyx_t_1); __Pyx_INCREF(__pyx_t_12); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 362, __pyx_L1_error) - #else - __pyx_t_12 = PySequence_ITEM(__pyx_t_11, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - #endif - } else { - if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_11)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_12 = PyTuple_GET_ITEM(__pyx_t_11, __pyx_t_1); __Pyx_INCREF(__pyx_t_12); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 362, __pyx_L1_error) - #else - __pyx_t_12 = PySequence_ITEM(__pyx_t_11, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - #endif - } - } else { - __pyx_t_12 = __pyx_t_22(__pyx_t_11); - if (unlikely(!__pyx_t_12)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 362, __pyx_L1_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_12); - } - __pyx_t_6 = __Pyx_PyObject_AsPy_UCS4(__pyx_t_12); if (unlikely((__pyx_t_6 == (Py_UCS4)-1) && PyErr_Occurred())) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - __pyx_v_ch = __pyx_t_6; - - /* "yarl/_quoting_c.pyx":363 - * h = hex(ord(ch)).upper()[2:] - * for ch in h: - * ret.append(ch) # <<<<<<<<<<<<<< - * continue - * - */ - __pyx_t_12 = PyUnicode_FromOrdinal(__pyx_v_ch); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 363, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_12); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 363, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - - /* "yarl/_quoting_c.pyx":362 - * ret.append('%') - * h = hex(ord(ch)).upper()[2:] - * for ch in h: # <<<<<<<<<<<<<< - * ret.append(ch) - * continue - */ - } - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - - /* "yarl/_quoting_c.pyx":364 - * for ch in h: - * ret.append(ch) - * continue # <<<<<<<<<<<<<< - * - * ret.append(ch) - */ - goto __pyx_L4_continue; - - /* "yarl/_quoting_c.pyx":359 - * continue - * - 
* if ch in self._unsafe: # <<<<<<<<<<<<<< - * ret.append('%') - * h = hex(ord(ch)).upper()[2:] - */ - } - - /* "yarl/_quoting_c.pyx":366 - * continue - * - * ret.append(ch) # <<<<<<<<<<<<<< - * - * if buflen: - */ - __pyx_t_11 = PyUnicode_FromOrdinal(__pyx_v_ch); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 366, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_11); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 366, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - __pyx_L4_continue:; - } - - /* "yarl/_quoting_c.pyx":368 - * ret.append(ch) - * - * if buflen: # <<<<<<<<<<<<<< - * ret.append(val[length - buflen * 3 : length]) - * - */ - __pyx_t_21 = (__pyx_v_buflen != 0); - if (__pyx_t_21) { - - /* "yarl/_quoting_c.pyx":369 - * - * if buflen: - * ret.append(val[length - buflen * 3 : length]) # <<<<<<<<<<<<<< - * - * return ''.join(ret) - */ - if (unlikely(__pyx_v_val == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 369, __pyx_L1_error) - } - __pyx_t_11 = __Pyx_PyUnicode_Substring(__pyx_v_val, (__pyx_v_length - (__pyx_v_buflen * 3)), __pyx_v_length); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 369, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_11); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 369, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - - /* "yarl/_quoting_c.pyx":368 - * ret.append(ch) - * - * if buflen: # <<<<<<<<<<<<<< - * ret.append(val[length - buflen * 3 : length]) - * - */ - } - - /* "yarl/_quoting_c.pyx":371 - * ret.append(val[length - buflen * 3 : length]) - * - * return ''.join(ret) # <<<<<<<<<<<<<< - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_11 = PyUnicode_Join(__pyx_kp_u_, __pyx_v_ret); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 371, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_11); - __pyx_r = ((PyObject*)__pyx_t_11); - __pyx_t_11 = 0; - goto __pyx_L0; - - /* "yarl/_quoting_c.pyx":294 - * return self._do_unquote(val) - * - * cdef str _do_unquote(self, str val): # <<<<<<<<<<<<<< - * if len(val) == 0: - * return val - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_11); - __Pyx_XDECREF(__pyx_t_12); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_18); - __Pyx_XDECREF(__pyx_t_19); - __Pyx_XDECREF(__pyx_t_20); - __Pyx_AddTraceback("yarl._quoting_c._Unquoter._do_unquote", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_ret); - __Pyx_XDECREF(__pyx_v_unquoted); - __Pyx_XDECREF(__pyx_v_h); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_9_Unquoter_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4yarl_10_quoting_c_9_Unquoter_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_4yarl_10_quoting_c_9_Unquoter_4__reduce_cython__(((struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_4yarl_10_quoting_c_9_Unquoter_4__reduce_cython__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self._qs, self._qs_quoter, self._quoter, self._unsafe) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_qs); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(4); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); - __Pyx_INCREF(((PyObject *)__pyx_v_self->_qs_quoter)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_self->_qs_quoter)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_self->_qs_quoter)); - __Pyx_INCREF(((PyObject *)__pyx_v_self->_quoter)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_self->_quoter)); - PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_self->_quoter)); - __Pyx_INCREF(__pyx_v_self->_unsafe); - __Pyx_GIVEREF(__pyx_v_self->_unsafe); - PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_self->_unsafe); - __pyx_t_1 = 0; - __pyx_v_state = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self._qs, self._qs_quoter, self._quoter, self._unsafe) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_v__dict = __pyx_t_2; - __pyx_t_2 = 0; - - /* "(tree fragment)":7 - * state = (self._qs, self._qs_quoter, self._quoter, self._unsafe) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_3 = (__pyx_v__dict != Py_None); - __pyx_t_4 = (__pyx_t_3 != 0); - if (__pyx_t_4) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); - __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); - __pyx_t_1 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self._qs_quoter is not None or self._quoter is not None or self._unsafe is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self._qs, self._qs_quoter, self._quoter, self._unsafe) - * _dict = getattr(self, '__dict__', None) - * if _dict is not 
None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self._qs_quoter is not None or self._quoter is not None or self._unsafe is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, None), state - */ - /*else*/ { - __pyx_t_3 = (((PyObject *)__pyx_v_self->_qs_quoter) != Py_None); - __pyx_t_5 = (__pyx_t_3 != 0); - if (!__pyx_t_5) { - } else { - __pyx_t_4 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = (((PyObject *)__pyx_v_self->_quoter) != Py_None); - __pyx_t_3 = (__pyx_t_5 != 0); - if (!__pyx_t_3) { - } else { - __pyx_t_4 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = (__pyx_v_self->_unsafe != ((PyObject*)Py_None)); - __pyx_t_5 = (__pyx_t_3 != 0); - __pyx_t_4 = __pyx_t_5; - __pyx_L4_bool_binop_done:; - __pyx_v_use_setstate = __pyx_t_4; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._qs_quoter is not None or self._quoter is not None or self._unsafe is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, None), state - * else: - */ - __pyx_t_4 = (__pyx_v_use_setstate != 0); - if (__pyx_t_4) { - - /* "(tree fragment)":13 - * use_setstate = self._qs_quoter is not None or self._quoter is not None or self._unsafe is not None - * if use_setstate: - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_pyx_unpickle__Unquoter); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_41310077); - __Pyx_GIVEREF(__pyx_int_41310077); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_41310077); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); - __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_2); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._qs_quoter is not None or self._quoter is not None or self._unsafe is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, None), state - * else: - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle__Unquoter__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_pyx_unpickle__Unquoter); if 
(unlikely(!__pyx_t_6)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_INCREF(__pyx_int_41310077); - __Pyx_GIVEREF(__pyx_int_41310077); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_41310077); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); - __pyx_t_6 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("yarl._quoting_c._Unquoter.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle__Unquoter__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_9_Unquoter_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_4yarl_10_quoting_c_9_Unquoter_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_4yarl_10_quoting_c_9_Unquoter_6__setstate_cython__(((struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c_9_Unquoter_6__setstate_cython__(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":17 - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle__Unquoter__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_4yarl_10_quoting_c___pyx_unpickle__Unquoter__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle__Unquoter, (type(self), 0x276577d, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle__Unquoter__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("yarl._quoting_c._Unquoter.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle__Quoter(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_1__pyx_unpickle__Quoter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_4yarl_10_quoting_c_1__pyx_unpickle__Quoter = {"__pyx_unpickle__Quoter", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_4yarl_10_quoting_c_1__pyx_unpickle__Quoter, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_4yarl_10_quoting_c_1__pyx_unpickle__Quoter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle__Quoter (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - PyObject* values[3] = {0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__Quoter", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__Quoter", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle__Quoter") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum 
== (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__Quoter", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("yarl._quoting_c.__pyx_unpickle__Quoter", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_4yarl_10_quoting_c___pyx_unpickle__Quoter(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c___pyx_unpickle__Quoter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle__Quoter", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0xe91bd35: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) - */ - __pyx_t_1 = ((__pyx_v___pyx_checksum != 0xe91bd35) != 0); - if (__pyx_t_1) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum != 0xe91bd35: - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) - * __pyx_result = _Quoter.__new__(__pyx_type) - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_t_2); - __pyx_v___pyx_PickleError = __pyx_t_2; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum != 0xe91bd35: - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) # <<<<<<<<<<<<<< - * __pyx_result = _Quoter.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0xe9, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_INCREF(__pyx_v___pyx_PickleError); - __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0xe91bd35: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) - * __pyx_result = _Quoter.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_4yarl_10_quoting_c__Quoter), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v___pyx_result = __pyx_t_3; - __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) - * __pyx_result = _Quoter.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_1 = (__pyx_v___pyx_state != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "(tree fragment)":9 - * __pyx_result = _Quoter.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_3 = __pyx_f_4yarl_10_quoting_c___pyx_unpickle__Quoter__set_state(((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0xe91bd35 = (_protected_table, _qs, _requote, _safe_table))" % __pyx_checksum) - * __pyx_result = _Quoter.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle__Quoter(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("yarl._quoting_c.__pyx_unpickle__Quoter", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = 
__pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_4yarl_10_quoting_c___pyx_unpickle__Quoter__set_state(struct __pyx_obj_4yarl_10_quoting_c__Quoter *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - uint8_t __pyx_t_2[16]; - int __pyx_t_3; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle__Quoter__set_state", 0); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[4]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely(__Pyx_carray_from_py_uint8_t(__pyx_t_1, __pyx_t_2, 16) < 0)) __PYX_ERR(1, 12, __pyx_L1_error) - memcpy(&(__pyx_v___pyx_result->_protected_table[0]), __pyx_t_2, sizeof(__pyx_v___pyx_result->_protected_table[0]) * (16)); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v___pyx_result->_qs = __pyx_t_3; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v___pyx_result->_requote = __pyx_t_3; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely(__Pyx_carray_from_py_uint8_t(__pyx_t_1, __pyx_t_2, 16) < 0)) __PYX_ERR(1, 12, __pyx_L1_error) - memcpy(&(__pyx_v___pyx_result->_safe_table[0]), __pyx_t_2, 
sizeof(__pyx_v___pyx_result->_safe_table[0]) * (16)); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[4]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_5 = ((__pyx_t_4 > 4) != 0); - if (__pyx_t_5) { - } else { - __pyx_t_3 = __pyx_t_5; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_6 = (__pyx_t_5 != 0); - __pyx_t_3 = __pyx_t_6; - __pyx_L4_bool_binop_done:; - if (__pyx_t_3) { - - /* "(tree fragment)":14 - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[4]) # <<<<<<<<<<<<<< - */ - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_9 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { - __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); - if (likely(__pyx_t_9)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); - __Pyx_INCREF(__pyx_t_9); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_8, function); - } - } - __pyx_t_1 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7); - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[4]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("yarl._quoting_c.__pyx_unpickle__Quoter__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle__Unquoter(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4yarl_10_quoting_c_3__pyx_unpickle__Unquoter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_4yarl_10_quoting_c_3__pyx_unpickle__Unquoter = {"__pyx_unpickle__Unquoter", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_4yarl_10_quoting_c_3__pyx_unpickle__Unquoter, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_4yarl_10_quoting_c_3__pyx_unpickle__Unquoter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle__Unquoter (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - PyObject* values[3] = {0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - 
CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__Unquoter", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__Unquoter", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle__Unquoter") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle__Unquoter", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("yarl._quoting_c.__pyx_unpickle__Unquoter", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_4yarl_10_quoting_c_2__pyx_unpickle__Unquoter(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4yarl_10_quoting_c_2__pyx_unpickle__Unquoter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle__Unquoter", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x276577d: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) - */ - __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x276577d) != 0); - if (__pyx_t_1) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum != 0x276577d: - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) - * __pyx_result = _Unquoter.__new__(__pyx_type) - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) 
__PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_t_2); - __pyx_v___pyx_PickleError = __pyx_t_2; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum != 0x276577d: - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) # <<<<<<<<<<<<<< - * __pyx_result = _Unquoter.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x27, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_INCREF(__pyx_v___pyx_PickleError); - __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum != 0x276577d: # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) - * __pyx_result = _Unquoter.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_4yarl_10_quoting_c__Unquoter), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - } - } - __pyx_t_3 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v___pyx_result = __pyx_t_3; - __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) - * __pyx_result = _Unquoter.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_1 = (__pyx_v___pyx_state != Py_None); - __pyx_t_6 = (__pyx_t_1 != 0); - if (__pyx_t_6) { - - /* "(tree fragment)":9 - * __pyx_result = _Unquoter.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_3 = __pyx_f_4yarl_10_quoting_c___pyx_unpickle__Unquoter__set_state(((struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError("Incompatible checksums (%s vs 0x276577d = (_qs, _qs_quoter, _quoter, _unsafe))" % __pyx_checksum) - * __pyx_result = _Unquoter.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = __pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle__Unquoter(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("yarl._quoting_c.__pyx_unpickle__Unquoter", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = 
__pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_4yarl_10_quoting_c___pyx_unpickle__Unquoter__set_state(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle__Unquoter__set_state", 0); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = __pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[4]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v___pyx_result->_qs = __pyx_t_2; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_4yarl_10_quoting_c__Quoter))))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->_qs_quoter); - __Pyx_DECREF(((PyObject *)__pyx_v___pyx_result->_qs_quoter)); - __pyx_v___pyx_result->_qs_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_4yarl_10_quoting_c__Quoter))))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->_quoter); - __Pyx_DECREF(((PyObject *)__pyx_v___pyx_result->_quoter)); - __pyx_v___pyx_result->_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, 
__Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->_unsafe); - __Pyx_DECREF(__pyx_v___pyx_result->_unsafe); - __pyx_v___pyx_result->_unsafe = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = __pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[4]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = ((__pyx_t_3 > 4) != 0); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_5 = (__pyx_t_4 != 0); - __pyx_t_2 = __pyx_t_5; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = __pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[4]) # <<<<<<<<<<<<<< - */ - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_8 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_8)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_8); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - } - } - __pyx_t_1 = (__pyx_t_8) ? 
__Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = __pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[4]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle__Unquoter__set_state(<_Unquoter> __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle__Unquoter__set_state(_Unquoter __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._qs = __pyx_state[0]; __pyx_result._qs_quoter = __pyx_state[1]; __pyx_result._quoter = __pyx_state[2]; __pyx_result._unsafe = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("yarl._quoting_c.__pyx_unpickle__Unquoter__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "carray.from_py":77 - * - * @cname("__Pyx_carray_from_py_uint8_t") - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: # <<<<<<<<<<<<<< - * cdef Py_ssize_t i = length - * try: - */ - -static int __Pyx_carray_from_py_uint8_t(PyObject *__pyx_v_o, uint8_t *__pyx_v_v, Py_ssize_t __pyx_v_length) { - Py_ssize_t __pyx_v_i; - PyObject *__pyx_v_item = NULL; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - PyObject *__pyx_t_7 = NULL; - Py_ssize_t __pyx_t_8; - PyObject *(*__pyx_t_9)(PyObject *); - PyObject *__pyx_t_10 = NULL; - uint8_t __pyx_t_11; - char const *__pyx_t_12; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_carray_from_py_uint8_t", 0); - - /* "carray.from_py":78 - * @cname("__Pyx_carray_from_py_uint8_t") - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: - * cdef Py_ssize_t i = length # <<<<<<<<<<<<<< - * try: - * i = len(o) - */ - __pyx_v_i = __pyx_v_length; - - /* "carray.from_py":79 - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: - * cdef Py_ssize_t i = length - * try: # <<<<<<<<<<<<<< - * i = len(o) - * except (TypeError, OverflowError): - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - /*try:*/ { - - /* "carray.from_py":80 - * cdef Py_ssize_t i = length - * try: - * i = len(o) # <<<<<<<<<<<<<< - * except (TypeError, OverflowError): - * pass - */ - __pyx_t_4 = PyObject_Length(__pyx_v_o); if 
(unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 80, __pyx_L3_error) - __pyx_v_i = __pyx_t_4; - - /* "carray.from_py":79 - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: - * cdef Py_ssize_t i = length - * try: # <<<<<<<<<<<<<< - * i = len(o) - * except (TypeError, OverflowError): - */ - } - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - goto __pyx_L8_try_end; - __pyx_L3_error:; - - /* "carray.from_py":81 - * try: - * i = len(o) - * except (TypeError, OverflowError): # <<<<<<<<<<<<<< - * pass - * if i == length: - */ - __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError) || __Pyx_PyErr_ExceptionMatches(__pyx_builtin_OverflowError); - if (__pyx_t_5) { - __Pyx_ErrRestore(0,0,0); - goto __pyx_L4_exception_handled; - } - goto __pyx_L5_except_error; - __pyx_L5_except_error:; - - /* "carray.from_py":79 - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: - * cdef Py_ssize_t i = length - * try: # <<<<<<<<<<<<<< - * i = len(o) - * except (TypeError, OverflowError): - */ - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L1_error; - __pyx_L4_exception_handled:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - __pyx_L8_try_end:; - } - - /* "carray.from_py":83 - * except (TypeError, OverflowError): - * pass - * if i == length: # <<<<<<<<<<<<<< - * for i, item in enumerate(o): - * if i >= length: - */ - __pyx_t_6 = ((__pyx_v_i == __pyx_v_length) != 0); - if (__pyx_t_6) { - - /* "carray.from_py":84 - * pass - * if i == length: - * for i, item in enumerate(o): # <<<<<<<<<<<<<< - * if i >= length: - * break - */ - __pyx_t_4 = 0; - if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { - __pyx_t_7 = __pyx_v_o; __Pyx_INCREF(__pyx_t_7); __pyx_t_8 = 0; - __pyx_t_9 = NULL; - } else { - __pyx_t_8 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 84, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_9 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_9)) __PYX_ERR(1, 84, __pyx_L1_error) - } - for (;;) { - if (likely(!__pyx_t_9)) { - if (likely(PyList_CheckExact(__pyx_t_7))) { - if (__pyx_t_8 >= PyList_GET_SIZE(__pyx_t_7)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_10 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_8); __Pyx_INCREF(__pyx_t_10); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(1, 84, __pyx_L1_error) - #else - __pyx_t_10 = PySequence_ITEM(__pyx_t_7, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_10)) __PYX_ERR(1, 84, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - #endif - } else { - if (__pyx_t_8 >= PyTuple_GET_SIZE(__pyx_t_7)) break; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_10 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_8); __Pyx_INCREF(__pyx_t_10); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(1, 84, __pyx_L1_error) - #else - __pyx_t_10 = PySequence_ITEM(__pyx_t_7, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_10)) __PYX_ERR(1, 84, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - #endif - } - } else { - __pyx_t_10 = __pyx_t_9(__pyx_t_7); - if (unlikely(!__pyx_t_10)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, 
PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(1, 84, __pyx_L1_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_10); - } - __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_10); - __pyx_t_10 = 0; - __pyx_v_i = __pyx_t_4; - __pyx_t_4 = (__pyx_t_4 + 1); - - /* "carray.from_py":85 - * if i == length: - * for i, item in enumerate(o): - * if i >= length: # <<<<<<<<<<<<<< - * break - * v[i] = item - */ - __pyx_t_6 = ((__pyx_v_i >= __pyx_v_length) != 0); - if (__pyx_t_6) { - - /* "carray.from_py":86 - * for i, item in enumerate(o): - * if i >= length: - * break # <<<<<<<<<<<<<< - * v[i] = item - * else: - */ - goto __pyx_L11_break; - - /* "carray.from_py":85 - * if i == length: - * for i, item in enumerate(o): - * if i >= length: # <<<<<<<<<<<<<< - * break - * v[i] = item - */ - } - - /* "carray.from_py":87 - * if i >= length: - * break - * v[i] = item # <<<<<<<<<<<<<< - * else: - * i += 1 # convert index to length - */ - __pyx_t_11 = __Pyx_PyInt_As_uint8_t(__pyx_v_item); if (unlikely((__pyx_t_11 == ((uint8_t)-1)) && PyErr_Occurred())) __PYX_ERR(1, 87, __pyx_L1_error) - (__pyx_v_v[__pyx_v_i]) = __pyx_t_11; - - /* "carray.from_py":84 - * pass - * if i == length: - * for i, item in enumerate(o): # <<<<<<<<<<<<<< - * if i >= length: - * break - */ - } - /*else*/ { - - /* "carray.from_py":89 - * v[i] = item - * else: - * i += 1 # convert index to length # <<<<<<<<<<<<<< - * if i == length: - * return 0 - */ - __pyx_v_i = (__pyx_v_i + 1); - - /* "carray.from_py":90 - * else: - * i += 1 # convert index to length - * if i == length: # <<<<<<<<<<<<<< - * return 0 - * - */ - __pyx_t_6 = ((__pyx_v_i == __pyx_v_length) != 0); - if (__pyx_t_6) { - - /* "carray.from_py":91 - * i += 1 # convert index to length - * if i == length: - * return 0 # <<<<<<<<<<<<<< - * - * PyErr_Format( - */ - __pyx_r = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - goto __pyx_L0; - - /* "carray.from_py":90 - * else: - * i += 1 # convert index to length - * if i == length: # <<<<<<<<<<<<<< - * return 0 - * - */ - } - } - - /* "carray.from_py":84 - * pass - * if i == length: - * for i, item in enumerate(o): # <<<<<<<<<<<<<< - * if i >= length: - * break - */ - __pyx_L11_break:; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - - /* "carray.from_py":83 - * except (TypeError, OverflowError): - * pass - * if i == length: # <<<<<<<<<<<<<< - * for i, item in enumerate(o): - * if i >= length: - */ - } - - /* "carray.from_py":96 - * IndexError, - * ("too many values found during array assignment, expected %zd" - * if i >= length else # <<<<<<<<<<<<<< - * "not enough values found during array assignment, expected %zd, got %zd"), - * length, i) - */ - if (((__pyx_v_i >= __pyx_v_length) != 0)) { - __pyx_t_12 = ((char const *)"too many values found during array assignment, expected %zd"); - } else { - __pyx_t_12 = ((char const *)"not enough values found during array assignment, expected %zd, got %zd"); - } - - /* "carray.from_py":93 - * return 0 - * - * PyErr_Format( # <<<<<<<<<<<<<< - * IndexError, - * ("too many values found during array assignment, expected %zd" - */ - __pyx_t_7 = PyErr_Format(__pyx_builtin_IndexError, __pyx_t_12, __pyx_v_length, __pyx_v_i); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - - /* "carray.from_py":77 - * - * @cname("__Pyx_carray_from_py_uint8_t") - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: # <<<<<<<<<<<<<< - * cdef Py_ssize_t i = length - * try: - */ - - /* function exit code */ - __pyx_r = 
0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_AddTraceback("carray.from_py.__Pyx_carray_from_py_uint8_t", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_item); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static struct __pyx_vtabstruct_4yarl_10_quoting_c__Quoter __pyx_vtable_4yarl_10_quoting_c__Quoter; - -static PyObject *__pyx_tp_new_4yarl_10_quoting_c__Quoter(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_4yarl_10_quoting_c__Quoter *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)o); - p->__pyx_vtab = __pyx_vtabptr_4yarl_10_quoting_c__Quoter; - return o; -} - -static void __pyx_tp_dealloc_4yarl_10_quoting_c__Quoter(PyObject *o) { - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && (!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - (*Py_TYPE(o)->tp_free)(o); -} - -static PyMethodDef __pyx_methods_4yarl_10_quoting_c__Quoter[] = { - {"__reduce_cython__", (PyCFunction)__pyx_pw_4yarl_10_quoting_c_7_Quoter_5__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_4yarl_10_quoting_c_7_Quoter_7__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_4yarl_10_quoting_c__Quoter = { - PyVarObject_HEAD_INIT(0, 0) - "yarl._quoting_c._Quoter", /*tp_name*/ - sizeof(struct __pyx_obj_4yarl_10_quoting_c__Quoter), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_4yarl_10_quoting_c__Quoter, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - __pyx_pw_4yarl_10_quoting_c_7_Quoter_3__call__, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/ - 0, /*tp_doc*/ - 0, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_4yarl_10_quoting_c__Quoter, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_4yarl_10_quoting_c_7_Quoter_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_4yarl_10_quoting_c__Quoter, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; -static struct __pyx_vtabstruct_4yarl_10_quoting_c__Unquoter 
__pyx_vtable_4yarl_10_quoting_c__Unquoter; - -static PyObject *__pyx_tp_new_4yarl_10_quoting_c__Unquoter(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_4yarl_10_quoting_c__Unquoter *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)o); - p->__pyx_vtab = __pyx_vtabptr_4yarl_10_quoting_c__Unquoter; - p->_unsafe = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)Py_None); Py_INCREF(Py_None); - p->_qs_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)Py_None); Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_4yarl_10_quoting_c__Unquoter(PyObject *o) { - struct __pyx_obj_4yarl_10_quoting_c__Unquoter *p = (struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->_unsafe); - Py_CLEAR(p->_quoter); - Py_CLEAR(p->_qs_quoter); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_4yarl_10_quoting_c__Unquoter(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_4yarl_10_quoting_c__Unquoter *p = (struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)o; - if (p->_quoter) { - e = (*v)(((PyObject *)p->_quoter), a); if (e) return e; - } - if (p->_qs_quoter) { - e = (*v)(((PyObject *)p->_qs_quoter), a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_4yarl_10_quoting_c__Unquoter(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_4yarl_10_quoting_c__Unquoter *p = (struct __pyx_obj_4yarl_10_quoting_c__Unquoter *)o; - tmp = ((PyObject*)p->_quoter); - p->_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_qs_quoter); - p->_qs_quoter = ((struct __pyx_obj_4yarl_10_quoting_c__Quoter *)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyMethodDef __pyx_methods_4yarl_10_quoting_c__Unquoter[] = { - {"__reduce_cython__", (PyCFunction)__pyx_pw_4yarl_10_quoting_c_9_Unquoter_5__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_4yarl_10_quoting_c_9_Unquoter_7__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_4yarl_10_quoting_c__Unquoter = { - PyVarObject_HEAD_INIT(0, 0) - "yarl._quoting_c._Unquoter", /*tp_name*/ - sizeof(struct __pyx_obj_4yarl_10_quoting_c__Unquoter), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_4yarl_10_quoting_c__Unquoter, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - __pyx_pw_4yarl_10_quoting_c_9_Unquoter_3__call__, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, 
/*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_4yarl_10_quoting_c__Unquoter, /*tp_traverse*/ - __pyx_tp_clear_4yarl_10_quoting_c__Unquoter, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_4yarl_10_quoting_c__Unquoter, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pw_4yarl_10_quoting_c_9_Unquoter_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_4yarl_10_quoting_c__Unquoter, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__quoting_c(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__quoting_c}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "_quoting_c", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, - {&__pyx_kp_u_Argument_should_be_str, __pyx_k_Argument_should_be_str, sizeof(__pyx_k_Argument_should_be_str), 0, 1, 0, 0}, - {&__pyx_kp_s_Incompatible_checksums_s_vs_0x27, __pyx_k_Incompatible_checksums_s_vs_0x27, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x27), 0, 0, 1, 0}, - {&__pyx_kp_s_Incompatible_checksums_s_vs_0xe9, __pyx_k_Incompatible_checksums_s_vs_0xe9, sizeof(__pyx_k_Incompatible_checksums_s_vs_0xe9), 0, 0, 1, 0}, - {&__pyx_n_s_IndexError, __pyx_k_IndexError, sizeof(__pyx_k_IndexError), 0, 0, 1, 1}, - {&__pyx_kp_u_Only_safe_symbols_with_ORD_128_a, __pyx_k_Only_safe_symbols_with_ORD_128_a, sizeof(__pyx_k_Only_safe_symbols_with_ORD_128_a), 0, 1, 0, 0}, - {&__pyx_n_s_OverflowError, __pyx_k_OverflowError, sizeof(__pyx_k_OverflowError), 0, 0, 1, 1}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_Quoter, __pyx_k_Quoter, sizeof(__pyx_k_Quoter), 0, 0, 1, 1}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_UnicodeDecodeError, __pyx_k_UnicodeDecodeError, sizeof(__pyx_k_UnicodeDecodeError), 0, 0, 1, 1}, - {&__pyx_n_s_Unquoter, __pyx_k_Unquoter, sizeof(__pyx_k_Unquoter), 0, 0, 1, 1}, - {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, - 
{&__pyx_kp_u__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 1, 0, 0}, - {&__pyx_kp_u__11, __pyx_k__11, sizeof(__pyx_k__11), 0, 1, 0, 0}, - {&__pyx_kp_u__12, __pyx_k__12, sizeof(__pyx_k__12), 0, 1, 0, 0}, - {&__pyx_kp_u__13, __pyx_k__13, sizeof(__pyx_k__13), 0, 1, 0, 0}, - {&__pyx_kp_u__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 1, 0, 0}, - {&__pyx_kp_u__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 1, 0, 0}, - {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0}, - {&__pyx_kp_u__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 1, 0, 0}, - {&__pyx_kp_u__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 1, 0, 0}, - {&__pyx_n_s_ascii_letters, __pyx_k_ascii_letters, sizeof(__pyx_k_ascii_letters), 0, 0, 1, 1}, - {&__pyx_n_s_chr, __pyx_k_chr, sizeof(__pyx_k_chr), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_digits, __pyx_k_digits, sizeof(__pyx_k_digits), 0, 0, 1, 1}, - {&__pyx_n_s_enumerate, __pyx_k_enumerate, sizeof(__pyx_k_enumerate), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_hex, __pyx_k_hex, sizeof(__pyx_k_hex), 0, 0, 1, 1}, - {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_protected, __pyx_k_protected, sizeof(__pyx_k_protected), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle__Quoter, __pyx_k_pyx_unpickle__Quoter, sizeof(__pyx_k_pyx_unpickle__Quoter), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle__Unquoter, __pyx_k_pyx_unpickle__Unquoter, sizeof(__pyx_k_pyx_unpickle__Unquoter), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_qs, __pyx_k_qs, sizeof(__pyx_k_qs), 0, 0, 1, 1}, - {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_requote, __pyx_k_requote, sizeof(__pyx_k_requote), 0, 0, 1, 1}, - {&__pyx_n_s_safe, __pyx_k_safe, sizeof(__pyx_k_safe), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_string, __pyx_k_string, sizeof(__pyx_k_string), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_unsafe, __pyx_k_unsafe, sizeof(__pyx_k_unsafe), 0, 0, 1, 1}, - 
{&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_upper, __pyx_k_upper, sizeof(__pyx_k_upper), 0, 0, 1, 1}, - {&__pyx_n_s_val, __pyx_k_val, sizeof(__pyx_k_val), 0, 0, 1, 1}, - {&__pyx_n_s_yarl__quoting_c, __pyx_k_yarl__quoting_c, sizeof(__pyx_k_yarl__quoting_c), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 70, __pyx_L1_error) - __pyx_builtin_chr = __Pyx_GetBuiltinName(__pyx_n_s_chr); if (!__pyx_builtin_chr) __PYX_ERR(0, 71, __pyx_L1_error) - __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(0, 194, __pyx_L1_error) - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 213, __pyx_L1_error) - __pyx_builtin_UnicodeDecodeError = __Pyx_GetBuiltinName(__pyx_n_s_UnicodeDecodeError); if (!__pyx_builtin_UnicodeDecodeError) __PYX_ERR(0, 320, __pyx_L1_error) - __pyx_builtin_hex = __Pyx_GetBuiltinName(__pyx_n_s_hex); if (!__pyx_builtin_hex) __PYX_ERR(0, 361, __pyx_L1_error) - __pyx_builtin_OverflowError = __Pyx_GetBuiltinName(__pyx_n_s_OverflowError); if (!__pyx_builtin_OverflowError) __PYX_ERR(1, 81, __pyx_L1_error) - __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(1, 84, __pyx_L1_error) - __pyx_builtin_IndexError = __Pyx_GetBuiltinName(__pyx_n_s_IndexError); if (!__pyx_builtin_IndexError) __PYX_ERR(1, 94, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "yarl/_quoting_c.pyx":194 - * for ch in safe: - * if ord(ch) > 127: - * raise ValueError("Only safe symbols with ORD < 128 are allowed") # <<<<<<<<<<<<<< - * set_bit(self._safe_table, ch) - * - */ - __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_Only_safe_symbols_with_ORD_128_a); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 194, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__2); - __Pyx_GIVEREF(__pyx_tuple__2); - - /* "yarl/_quoting_c.pyx":213 - * val = str(val) - * else: - * raise TypeError("Argument should be str") # <<<<<<<<<<<<<< - * _init_writer(&writer) - * try: - */ - __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_Argument_should_be_str); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 213, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__3); - __Pyx_GIVEREF(__pyx_tuple__3); - - /* "yarl/_quoting_c.pyx":361 - * if ch in self._unsafe: - * ret.append('%') - * h = hex(ord(ch)).upper()[2:] # <<<<<<<<<<<<<< - * for ch in h: - * ret.append(ch) - */ - __pyx_slice__8 = PySlice_New(__pyx_int_2, Py_None, Py_None); if (unlikely(!__pyx_slice__8)) __PYX_ERR(0, 361, __pyx_L1_error) - __Pyx_GOTREF(__pyx_slice__8); - __Pyx_GIVEREF(__pyx_slice__8); - - /* "(tree fragment)":1 - * def __pyx_unpickle__Quoter(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__14 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__14); - __Pyx_GIVEREF(__pyx_tuple__14); - __pyx_codeobj__15 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, 
__pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__14, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle__Quoter, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__15)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_tuple__16 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__16); - __Pyx_GIVEREF(__pyx_tuple__16); - __pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle__Unquoter, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__17)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_41310077 = PyInt_FromLong(41310077L); if (unlikely(!__pyx_int_41310077)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_244432181 = PyInt_FromLong(244432181L); if (unlikely(!__pyx_int_244432181)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __pyx_v_4yarl_10_quoting_c_GEN_DELIMS = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_4yarl_10_quoting_c_SUB_DELIMS_WITHOUT_QS = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_4yarl_10_quoting_c_SUB_DELIMS = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_4yarl_10_quoting_c_RESERVED = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_4yarl_10_quoting_c_UNRESERVED = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_4yarl_10_quoting_c_ALLOWED = ((PyObject*)Py_None); Py_INCREF(Py_None); - __pyx_v_4yarl_10_quoting_c_QS = ((PyObject*)Py_None); Py_INCREF(Py_None); - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - 
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __pyx_vtabptr_4yarl_10_quoting_c__Quoter = &__pyx_vtable_4yarl_10_quoting_c__Quoter; - __pyx_vtable_4yarl_10_quoting_c__Quoter._do_quote = (PyObject *(*)(struct __pyx_obj_4yarl_10_quoting_c__Quoter *, PyObject *, struct __pyx_t_4yarl_10_quoting_c_Writer *))__pyx_f_4yarl_10_quoting_c_7_Quoter__do_quote; - __pyx_vtable_4yarl_10_quoting_c__Quoter._write = (int (*)(struct __pyx_obj_4yarl_10_quoting_c__Quoter *, struct __pyx_t_4yarl_10_quoting_c_Writer *, Py_UCS4))__pyx_f_4yarl_10_quoting_c_7_Quoter__write; - if (PyType_Ready(&__pyx_type_4yarl_10_quoting_c__Quoter) < 0) __PYX_ERR(0, 169, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_4yarl_10_quoting_c__Quoter.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_4yarl_10_quoting_c__Quoter.tp_dictoffset && __pyx_type_4yarl_10_quoting_c__Quoter.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_4yarl_10_quoting_c__Quoter.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (__Pyx_SetVtable(__pyx_type_4yarl_10_quoting_c__Quoter.tp_dict, __pyx_vtabptr_4yarl_10_quoting_c__Quoter) < 0) __PYX_ERR(0, 169, __pyx_L1_error) - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_Quoter, (PyObject *)&__pyx_type_4yarl_10_quoting_c__Quoter) < 0) __PYX_ERR(0, 169, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_4yarl_10_quoting_c__Quoter) < 0) __PYX_ERR(0, 169, __pyx_L1_error) - __pyx_ptype_4yarl_10_quoting_c__Quoter = &__pyx_type_4yarl_10_quoting_c__Quoter; - __pyx_vtabptr_4yarl_10_quoting_c__Unquoter = &__pyx_vtable_4yarl_10_quoting_c__Unquoter; - __pyx_vtable_4yarl_10_quoting_c__Unquoter._do_unquote = (PyObject *(*)(struct __pyx_obj_4yarl_10_quoting_c__Unquoter *, PyObject *))__pyx_f_4yarl_10_quoting_c_9_Unquoter__do_unquote; - if (PyType_Ready(&__pyx_type_4yarl_10_quoting_c__Unquoter) < 0) __PYX_ERR(0, 271, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_4yarl_10_quoting_c__Unquoter.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_4yarl_10_quoting_c__Unquoter.tp_dictoffset && __pyx_type_4yarl_10_quoting_c__Unquoter.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_4yarl_10_quoting_c__Unquoter.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (__Pyx_SetVtable(__pyx_type_4yarl_10_quoting_c__Unquoter.tp_dict, __pyx_vtabptr_4yarl_10_quoting_c__Unquoter) < 0) __PYX_ERR(0, 271, __pyx_L1_error) - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_Unquoter, (PyObject *)&__pyx_type_4yarl_10_quoting_c__Unquoter) < 0) __PYX_ERR(0, 271, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_4yarl_10_quoting_c__Unquoter) < 0) __PYX_ERR(0, 271, __pyx_L1_error) - __pyx_ptype_4yarl_10_quoting_c__Unquoter = &__pyx_type_4yarl_10_quoting_c__Unquoter; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", - #if defined(PYPY_VERSION_NUM) && 
PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), - #else - sizeof(PyHeapTypeObject), - #endif - __Pyx_ImportType_CheckSize_Warn); - if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_quoting_c(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_quoting_c(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__quoting_c(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__quoting_c(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? 
-1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { - result = PyDict_SetItemString(moddict, to_name, value); - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__quoting_c(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - long __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - uint64_t __pyx_t_7; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_quoting_c' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__quoting_c(void)", 0); - if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? */ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_quoting_c", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_b); - __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_cython_runtime); - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_yarl___quoting_c) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "yarl._quoting_c")) { - if (unlikely(PyDict_SetItemString(modules, "yarl._quoting_c", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "yarl/_quoting_c.pyx":10 - * from cpython.unicode cimport PyUnicode_DecodeASCII, PyUnicode_DecodeUTF8Stateful - * - * from string import ascii_letters, digits # <<<<<<<<<<<<<< - * - * cdef str GEN_DELIMS = ":/?#[]@" - */ - __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_ascii_letters); - __Pyx_GIVEREF(__pyx_n_s_ascii_letters); - PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_ascii_letters); - __Pyx_INCREF(__pyx_n_s_digits); - __Pyx_GIVEREF(__pyx_n_s_digits); - PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_digits); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_string, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ascii_letters); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_ascii_letters, __pyx_t_1) < 0) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_digits); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_digits, __pyx_t_1) < 0) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "yarl/_quoting_c.pyx":12 - * from string import ascii_letters, digits - * - * cdef str GEN_DELIMS = ":/?#[]@" # <<<<<<<<<<<<<< - * cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*," - * cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' - */ - __Pyx_INCREF(__pyx_kp_u__9); - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_GEN_DELIMS); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_GEN_DELIMS, __pyx_kp_u__9); - __Pyx_GIVEREF(__pyx_kp_u__9); - - /* "yarl/_quoting_c.pyx":13 - * - * cdef str GEN_DELIMS = ":/?#[]@" - * cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*," # 
<<<<<<<<<<<<<< - * cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' - * cdef str RESERVED = GEN_DELIMS + SUB_DELIMS - */ - __Pyx_INCREF(__pyx_kp_u__10); - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_SUB_DELIMS_WITHOUT_QS); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_SUB_DELIMS_WITHOUT_QS, __pyx_kp_u__10); - __Pyx_GIVEREF(__pyx_kp_u__10); - - /* "yarl/_quoting_c.pyx":14 - * cdef str GEN_DELIMS = ":/?#[]@" - * cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*," - * cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' # <<<<<<<<<<<<<< - * cdef str RESERVED = GEN_DELIMS + SUB_DELIMS - * cdef str UNRESERVED = ascii_letters + digits + '-._~' - */ - __pyx_t_2 = __Pyx_PyUnicode_ConcatSafe(__pyx_v_4yarl_10_quoting_c_SUB_DELIMS_WITHOUT_QS, __pyx_kp_u__11); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_SUB_DELIMS); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_SUB_DELIMS, ((PyObject*)__pyx_t_2)); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_2 = 0; - - /* "yarl/_quoting_c.pyx":15 - * cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*," - * cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' - * cdef str RESERVED = GEN_DELIMS + SUB_DELIMS # <<<<<<<<<<<<<< - * cdef str UNRESERVED = ascii_letters + digits + '-._~' - * cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS - */ - __pyx_t_2 = __Pyx_PyUnicode_ConcatSafe(__pyx_v_4yarl_10_quoting_c_GEN_DELIMS, __pyx_v_4yarl_10_quoting_c_SUB_DELIMS); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_RESERVED); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_RESERVED, ((PyObject*)__pyx_t_2)); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_2 = 0; - - /* "yarl/_quoting_c.pyx":16 - * cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' - * cdef str RESERVED = GEN_DELIMS + SUB_DELIMS - * cdef str UNRESERVED = ascii_letters + digits + '-._~' # <<<<<<<<<<<<<< - * cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS - * cdef str QS = '+&=;' - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_ascii_letters); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_digits); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Add(__pyx_t_3, __pyx_kp_u__12); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_UNRESERVED); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_UNRESERVED, ((PyObject*)__pyx_t_1)); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "yarl/_quoting_c.pyx":17 - * cdef str RESERVED = GEN_DELIMS + SUB_DELIMS - * cdef str UNRESERVED = ascii_letters + digits + '-._~' - * cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS # <<<<<<<<<<<<<< - * cdef str QS = '+&=;' - * - */ - __pyx_t_1 = __Pyx_PyUnicode_ConcatSafe(__pyx_v_4yarl_10_quoting_c_UNRESERVED, __pyx_v_4yarl_10_quoting_c_SUB_DELIMS_WITHOUT_QS); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_ALLOWED); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_ALLOWED, ((PyObject*)__pyx_t_1)); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_1 = 0; - - /* "yarl/_quoting_c.pyx":18 - * cdef str UNRESERVED = ascii_letters + digits + '-._~' - * cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS - * cdef str QS = '+&=;' # <<<<<<<<<<<<<< - * - * DEF BUF_SIZE = 8 * 1024 # 8KiB - */ - __Pyx_INCREF(__pyx_kp_u__13); - __Pyx_XGOTREF(__pyx_v_4yarl_10_quoting_c_QS); - __Pyx_DECREF_SET(__pyx_v_4yarl_10_quoting_c_QS, __pyx_kp_u__13); - __Pyx_GIVEREF(__pyx_kp_u__13); - - /* "yarl/_quoting_c.pyx":67 - * - * - * memset(ALLOWED_TABLE, 0, sizeof(ALLOWED_TABLE)) # <<<<<<<<<<<<<< - * memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE)) - * - */ - (void)(memset(__pyx_v_4yarl_10_quoting_c_ALLOWED_TABLE, 0, (sizeof(__pyx_v_4yarl_10_quoting_c_ALLOWED_TABLE)))); - - /* "yarl/_quoting_c.pyx":68 - * - * memset(ALLOWED_TABLE, 0, sizeof(ALLOWED_TABLE)) - * memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE)) # <<<<<<<<<<<<<< - * - * for i in range(128): - */ - (void)(memset(__pyx_v_4yarl_10_quoting_c_ALLOWED_NOTQS_TABLE, 0, (sizeof(__pyx_v_4yarl_10_quoting_c_ALLOWED_NOTQS_TABLE)))); - - /* "yarl/_quoting_c.pyx":70 - * memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE)) - * - * for i in range(128): # <<<<<<<<<<<<<< - * if chr(i) in ALLOWED: - * set_bit(ALLOWED_TABLE, i) - */ - for (__pyx_t_4 = 0; __pyx_t_4 < 0x80; __pyx_t_4+=1) { - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_i, __pyx_t_1) < 0) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "yarl/_quoting_c.pyx":71 - * - * for i in range(128): - * if chr(i) in ALLOWED: # <<<<<<<<<<<<<< - * set_bit(ALLOWED_TABLE, i) - * set_bit(ALLOWED_NOTQS_TABLE, i) - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_i); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_chr, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(__pyx_v_4yarl_10_quoting_c_ALLOWED == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 71, __pyx_L1_error) - } - __pyx_t_5 = (__Pyx_PyUnicode_ContainsTF(__pyx_t_3, __pyx_v_4yarl_10_quoting_c_ALLOWED, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 71, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { - - /* "yarl/_quoting_c.pyx":72 - * for i in range(128): - * if chr(i) in ALLOWED: - * set_bit(ALLOWED_TABLE, i) # <<<<<<<<<<<<<< - * set_bit(ALLOWED_NOTQS_TABLE, i) - * if chr(i) in QS: - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_i); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 72, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_7 = __Pyx_PyInt_As_uint64_t(__pyx_t_3); if (unlikely((__pyx_t_7 == ((uint64_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 72, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_f_4yarl_10_quoting_c_set_bit(__pyx_v_4yarl_10_quoting_c_ALLOWED_TABLE, __pyx_t_7); - - /* "yarl/_quoting_c.pyx":73 - * if chr(i) in ALLOWED: - * set_bit(ALLOWED_TABLE, i) - * set_bit(ALLOWED_NOTQS_TABLE, i) # <<<<<<<<<<<<<< - * if chr(i) in QS: - * set_bit(ALLOWED_NOTQS_TABLE, i) - */ - 
__Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_i); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 73, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_7 = __Pyx_PyInt_As_uint64_t(__pyx_t_3); if (unlikely((__pyx_t_7 == ((uint64_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 73, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_f_4yarl_10_quoting_c_set_bit(__pyx_v_4yarl_10_quoting_c_ALLOWED_NOTQS_TABLE, __pyx_t_7); - - /* "yarl/_quoting_c.pyx":71 - * - * for i in range(128): - * if chr(i) in ALLOWED: # <<<<<<<<<<<<<< - * set_bit(ALLOWED_TABLE, i) - * set_bit(ALLOWED_NOTQS_TABLE, i) - */ - } - - /* "yarl/_quoting_c.pyx":74 - * set_bit(ALLOWED_TABLE, i) - * set_bit(ALLOWED_NOTQS_TABLE, i) - * if chr(i) in QS: # <<<<<<<<<<<<<< - * set_bit(ALLOWED_NOTQS_TABLE, i) - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_i); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_chr, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(__pyx_v_4yarl_10_quoting_c_QS == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 74, __pyx_L1_error) - } - __pyx_t_6 = (__Pyx_PyUnicode_ContainsTF(__pyx_t_1, __pyx_v_4yarl_10_quoting_c_QS, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { - - /* "yarl/_quoting_c.pyx":75 - * set_bit(ALLOWED_NOTQS_TABLE, i) - * if chr(i) in QS: - * set_bit(ALLOWED_NOTQS_TABLE, i) # <<<<<<<<<<<<<< - * - * # ----------------- writer --------------------------- - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_i); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 75, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyInt_As_uint64_t(__pyx_t_1); if (unlikely((__pyx_t_7 == ((uint64_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 75, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_f_4yarl_10_quoting_c_set_bit(__pyx_v_4yarl_10_quoting_c_ALLOWED_NOTQS_TABLE, __pyx_t_7); - - /* "yarl/_quoting_c.pyx":74 - * set_bit(ALLOWED_TABLE, i) - * set_bit(ALLOWED_NOTQS_TABLE, i) - * if chr(i) in QS: # <<<<<<<<<<<<<< - * set_bit(ALLOWED_NOTQS_TABLE, i) - * - */ - } - } - - /* "(tree fragment)":1 - * def __pyx_unpickle__Quoter(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_4yarl_10_quoting_c_1__pyx_unpickle__Quoter, NULL, __pyx_n_s_yarl__quoting_c); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle__Quoter, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":11 - * __pyx_unpickle__Quoter__set_state(<_Quoter> __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle__Quoter__set_state(_Quoter __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._protected_table = __pyx_state[0]; __pyx_result._qs = __pyx_state[1]; __pyx_result._requote = __pyx_state[2]; __pyx_result._safe_table = __pyx_state[3] - * if len(__pyx_state) > 4 and hasattr(__pyx_result, '__dict__'): - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_4yarl_10_quoting_c_3__pyx_unpickle__Unquoter, NULL, __pyx_n_s_yarl__quoting_c); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle__Unquoter, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "yarl/_quoting_c.pyx":1 - * # cython: language_level=3 # <<<<<<<<<<<<<< - * - * from libc.stdint cimport uint8_t, uint64_t - */ - __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "carray.from_py":77 - * - * @cname("__Pyx_carray_from_py_uint8_t") - * cdef int __Pyx_carray_from_py_uint8_t(object o, base_type *v, Py_ssize_t length) except -1: # <<<<<<<<<<<<<< - * cdef Py_ssize_t i = length - * try: - */ - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - if (__pyx_m) { - if (__pyx_d) { - __Pyx_AddTraceback("init yarl._quoting_c", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - Py_CLEAR(__pyx_m); - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init yarl._quoting_c"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* WriteUnraisableException */ -static void 
__Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, - CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, - int full_traceback, CYTHON_UNUSED int nogil) { - PyObject *old_exc, *old_val, *old_tb; - PyObject *ctx; - __Pyx_PyThreadState_declare -#ifdef WITH_THREAD - PyGILState_STATE state; - if (nogil) - state = PyGILState_Ensure(); -#ifdef _MSC_VER - else state = (PyGILState_STATE)-1; -#endif -#endif - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); - if (full_traceback) { - Py_XINCREF(old_exc); - Py_XINCREF(old_val); - Py_XINCREF(old_tb); - __Pyx_ErrRestore(old_exc, old_val, old_tb); - PyErr_PrintEx(1); - } - #if PY_MAJOR_VERSION < 3 - ctx = PyString_FromString(name); - #else - ctx = PyUnicode_FromString(name); - #endif - __Pyx_ErrRestore(old_exc, old_val, old_tb); - if (!ctx) { - PyErr_WriteUnraisable(Py_None); - } else { - PyErr_WriteUnraisable(ctx); - Py_DECREF(ctx); - } -#ifdef WITH_THREAD - if (nogil) - PyGILState_Release(state); -#endif -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? "" : "s", num_found); -} - -/* ArgTypeTest */ -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) -{ - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - else if (exact) { - #if PY_MAJOR_VERSION == 2 - if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; - #endif - } - else { - if (likely(__Pyx_TypeCheck(obj, type))) return 1; - } - PyErr_Format(PyExc_TypeError, - "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", - name, type->tp_name, Py_TYPE(obj)->tp_name); - return 0; -} - -/* unicode_iter */ -static CYTHON_INLINE int __Pyx_init_unicode_iteration( - PyObject* ustring, Py_ssize_t *length, void** data, int *kind) { -#if CYTHON_PEP393_ENABLED - if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return -1; - *kind = PyUnicode_KIND(ustring); - *length = PyUnicode_GET_LENGTH(ustring); - *data = PyUnicode_DATA(ustring); -#else - *kind = 0; - *length = PyUnicode_GET_SIZE(ustring); - *data = (void*)PyUnicode_AS_UNICODE(ustring); -#endif - return 0; -} - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, - CYTHON_UNUSED PyObject *cause) { - __Pyx_PyThreadState_declare - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - 
if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { -#if CYTHON_COMPILING_IN_PYPY - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#else - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#endif - } -bad: - Py_XDECREF(owned_instance); 
- return; -} -#endif - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = PyCFunction_GET_SELF(func); - int flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? 
PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, &arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - 
Py_DECREF(args); - return result; -} -#endif - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type, *local_value, *local_tb; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - *type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - -/* SwapException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = *type; - exc_info->exc_value = *value; - exc_info->exc_traceback = *tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = *type; - tstate->exc_value = *value; - tstate->exc_traceback = *tb; - #endif - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#else -static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); - PyErr_SetExcInfo(*type, *value, *tb); - *type = tmp_type; - *value = tmp_value; - *tb = tmp_tb; -} -#endif - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void 
__Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - #endif - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); -} -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -#endif - -/* GetItemIntUnicode */ -static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py_ssize_t i, - int wraparound, int boundscheck) { - Py_ssize_t length; - if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return (Py_UCS4)-1; - if (wraparound | boundscheck) { - length = __Pyx_PyUnicode_GET_LENGTH(ustring); - if (wraparound & unlikely(i < 0)) i += length; - if ((!boundscheck) || likely(__Pyx_is_valid_index(i, length))) { - return __Pyx_PyUnicode_READ_CHAR(ustring, i); - } else { - PyErr_SetString(PyExc_IndexError, "string index out of range"); - return (Py_UCS4)-1; - } - } else { - return __Pyx_PyUnicode_READ_CHAR(ustring, i); - } -} - -/* ReRaiseException */ -static CYTHON_INLINE void __Pyx_ReraiseException(void) { - PyObject *type = NULL, *value = NULL, *tb = NULL; -#if CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = PyThreadState_GET(); - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - type = exc_info->exc_type; - value = exc_info->exc_value; - tb = exc_info->exc_traceback; - #else - type = tstate->exc_type; - value = tstate->exc_value; - tb = tstate->exc_traceback; - #endif -#else - PyErr_GetExcInfo(&type, &value, &tb); -#endif - if (!type || type == Py_None) { -#if !CYTHON_FAST_THREAD_STATE - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(tb); -#endif - PyErr_SetString(PyExc_RuntimeError, - "No active exception to reraise"); - } else { -#if CYTHON_FAST_THREAD_STATE - Py_INCREF(type); - Py_XINCREF(value); - Py_XINCREF(tb); -#endif - PyErr_Restore(type, value, tb); - } -} - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; icurexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; - if (unlikely(PyTuple_Check(err))) - return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); -} -#endif - -/* GetAttr */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { -#if CYTHON_USE_TYPE_SLOTS -#if PY_MAJOR_VERSION >= 3 - if (likely(PyUnicode_Check(n))) -#else - if (likely(PyString_Check(n))) -#endif - return __Pyx_PyObject_GetAttrStr(o, n); -#endif - return 
PyObject_GetAttr(o, n); -} - -/* GetAttr3 */ -static PyObject *__Pyx_GetAttr3Default(PyObject *d) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - return NULL; - __Pyx_PyErr_Clear(); - Py_INCREF(d); - return d; -} -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { - PyObject *r = __Pyx_GetAttr(o, n); - return (likely(r)) ? r : __Pyx_GetAttr3Default(d); -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* PyObjectCallNoArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, NULL, 0); - } -#endif -#ifdef __Pyx_CyFunction_USED - if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) -#else - if (likely(PyCFunction_Check(func))) -#endif - { - if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { - return __Pyx_PyObject_CallMethO(func, NULL); - } - } - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); -} -#endif - -/* PyUnicode_Substring */ -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( - PyObject* text, Py_ssize_t start, Py_ssize_t stop) { - Py_ssize_t length; - if (unlikely(__Pyx_PyUnicode_READY(text) == -1)) return NULL; - length = __Pyx_PyUnicode_GET_LENGTH(text); - if (start < 0) { - start += 
length; - if (start < 0) - start = 0; - } - if (stop < 0) - stop += length; - else if (stop > length) - stop = length; - if (stop <= start) - return __Pyx_NewRef(__pyx_empty_unicode); -#if CYTHON_PEP393_ENABLED - return PyUnicode_FromKindAndData(PyUnicode_KIND(text), - PyUnicode_1BYTE_DATA(text) + start*PyUnicode_KIND(text), stop-start); -#else - return PyUnicode_FromUnicode(PyUnicode_AS_UNICODE(text)+start, stop-start); -#endif -} - -/* PyObjectCall2Args */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args, *result = NULL; - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyFunction_FastCall(function, args, 2); - } - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyCFunction_FastCall(function, args, 2); - } - #endif - args = PyTuple_New(2); - if (unlikely(!args)) goto done; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, arg2); - Py_INCREF(function); - result = __Pyx_PyObject_Call(function, args, NULL); - Py_DECREF(args); - Py_DECREF(function); -done: - return result; -} - -/* SliceObject */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, - Py_ssize_t cstart, Py_ssize_t cstop, - PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, - int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { -#if CYTHON_USE_TYPE_SLOTS - PyMappingMethods* mp; -#if PY_MAJOR_VERSION < 3 - PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; - if (likely(ms && ms->sq_slice)) { - if (!has_cstart) { - if (_py_start && (*_py_start != Py_None)) { - cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); - if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; - } else - cstart = 0; - } - if (!has_cstop) { - if (_py_stop && (*_py_stop != Py_None)) { - cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); - if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; - } else - cstop = PY_SSIZE_T_MAX; - } - if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { - Py_ssize_t l = ms->sq_length(obj); - if (likely(l >= 0)) { - if (cstop < 0) { - cstop += l; - if (cstop < 0) cstop = 0; - } - if (cstart < 0) { - cstart += l; - if (cstart < 0) cstart = 0; - } - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - goto bad; - PyErr_Clear(); - } - } - return ms->sq_slice(obj, cstart, cstop); - } -#endif - mp = Py_TYPE(obj)->tp_as_mapping; - if (likely(mp && mp->mp_subscript)) -#endif - { - PyObject* result; - PyObject *py_slice, *py_start, *py_stop; - if (_py_slice) { - py_slice = *_py_slice; - } else { - PyObject* owned_start = NULL; - PyObject* owned_stop = NULL; - if (_py_start) { - py_start = *_py_start; - } else { - if (has_cstart) { - owned_start = py_start = PyInt_FromSsize_t(cstart); - if (unlikely(!py_start)) goto bad; - } else - py_start = Py_None; - } - if (_py_stop) { - py_stop = *_py_stop; - } else { - if (has_cstop) { - owned_stop = py_stop = PyInt_FromSsize_t(cstop); - if (unlikely(!py_stop)) { - Py_XDECREF(owned_start); - goto bad; - } - } else - py_stop = Py_None; - } - py_slice = PySlice_New(py_start, py_stop, Py_None); - Py_XDECREF(owned_start); - Py_XDECREF(owned_stop); - if (unlikely(!py_slice)) goto bad; - } -#if CYTHON_USE_TYPE_SLOTS - result = mp->mp_subscript(obj, py_slice); -#else - result = PyObject_GetItem(obj, py_slice); -#endif - if (!_py_slice) { - 
Py_DECREF(py_slice); - } - return result; - } - PyErr_Format(PyExc_TypeError, - "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); -bad: - return NULL; -} - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (!py_import) - goto bad; - #endif - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, 1); - if (!module) { - if (!PyErr_ExceptionMatches(PyExc_ImportError)) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (!py_level) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, level); - #endif - } - } -bad: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - Py_XDECREF(empty_list); - Py_XDECREF(empty_dict); - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (!j) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, 
int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } else { - PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; - if (likely(m && m->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { - Py_ssize_t l = m->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return m->sq_item(o, i); - } - } -#else - if (is_list || PySequence_Check(o)) { - return PySequence_GetItem(o, i); - } -#endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -/* HasAttr */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { - PyObject *r; - if (unlikely(!__Pyx_PyBaseString_Check(n))) { - PyErr_SetString(PyExc_TypeError, - "hasattr(): attribute name must be string"); - return -1; - } - r = __Pyx_GetAttr(o, n); - if (unlikely(!r)) { - PyErr_Clear(); - return 0; - } else { - Py_DECREF(r); - return 1; - } -} - -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", - Py_TYPE(obj)->tp_name, type->tp_name); - return 0; -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, attr_name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(attr_name)); -#endif - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return 
__Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetVTable */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable) { -#if PY_VERSION_HEX >= 0x02070000 - PyObject *ob = PyCapsule_New(vtable, 0, 0); -#else - PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); -#endif - if (!ob) - goto bad; - if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* PyObjectGetAttrStrNoError */ -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -} - -/* SetupReduce */ -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#else - if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#endif -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate 
= __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) - PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} - -/* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType -#define __PYX_HAVE_RT_ImportType -static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, - size_t size, enum __Pyx_ImportType_CheckSize check_size) -{ - PyObject *result = 0; - char warning[200]; - Py_ssize_t basicsize; -#ifdef Py_LIMITED_API - PyObject *py_basicsize; -#endif - result = PyObject_GetAttrString(module, class_name); - if (!result) - goto bad; - if (!PyType_Check(result)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%.200s is not a type object", - module_name, class_name); - goto bad; - } -#ifndef Py_LIMITED_API - basicsize = ((PyTypeObject *)result)->tp_basicsize; -#else - py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); - if (!py_basicsize) - goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; -#endif - if ((size_t)basicsize < size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - goto bad; - } - else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { - PyOS_snprintf(warning, sizeof(warning), - "%s.%s size changed, may indicate binary incompatibility. 
" - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; - } - return (PyTypeObject *)result; -bad: - Py_XDECREF(result); - return NULL; -} -#endif - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - 
return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(int) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(int) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(int) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(int), - little, !is_unsigned); - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* PyUCS4InUnicode */ -#if PY_VERSION_HEX < 0x03090000 -#if Py_UNICODE_SIZE == 2 -static int __Pyx_PyUnicodeBufferContainsUCS4_SP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { - Py_UNICODE high_val, low_val; - Py_UNICODE* pos; - high_val = (Py_UNICODE) (0xD800 | (((character - 0x10000) >> 10) & ((1<<10)-1))); - low_val = (Py_UNICODE) (0xDC00 | ( (character - 0x10000) & 
((1<<10)-1))); - for (pos=buffer; pos < buffer+length-1; pos++) { - if (unlikely((high_val == pos[0]) & (low_val == pos[1]))) return 1; - } - return 0; -} -#endif -static int __Pyx_PyUnicodeBufferContainsUCS4_BMP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { - Py_UNICODE uchar; - Py_UNICODE* pos; - uchar = (Py_UNICODE) character; - for (pos=buffer; pos < buffer+length; pos++) { - if (unlikely(uchar == pos[0])) return 1; - } - return 0; -} -#endif -static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character) { -#if CYTHON_PEP393_ENABLED - const int kind = PyUnicode_KIND(unicode); - if (likely(kind != PyUnicode_WCHAR_KIND)) { - Py_ssize_t i; - const void* udata = PyUnicode_DATA(unicode); - const Py_ssize_t length = PyUnicode_GET_LENGTH(unicode); - for (i=0; i < length; i++) { - if (unlikely(character == PyUnicode_READ(kind, udata, i))) return 1; - } - return 0; - } -#elif PY_VERSION_HEX >= 0x03090000 - #error Cannot use "UChar in Unicode" in Python 3.9 without PEP-393 unicode strings. -#endif -#if PY_VERSION_HEX < 0x03090000 -#if Py_UNICODE_SIZE == 2 - if (unlikely(character > 65535)) { - return __Pyx_PyUnicodeBufferContainsUCS4_SP( - PyUnicode_AS_UNICODE(unicode), - PyUnicode_GET_SIZE(unicode), - character); - } else -#endif - { - return __Pyx_PyUnicodeBufferContainsUCS4_BMP( - PyUnicode_AS_UNICODE(unicode), - PyUnicode_GET_SIZE(unicode), - character); - } -#endif -} - -/* UnicodeAsUCS4 */ -static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject* x) { - Py_ssize_t length; - #if CYTHON_PEP393_ENABLED - length = PyUnicode_GET_LENGTH(x); - if (likely(length == 1)) { - return PyUnicode_READ_CHAR(x, 0); - } - #else - length = PyUnicode_GET_SIZE(x); - if (likely(length == 1)) { - return PyUnicode_AS_UNICODE(x)[0]; - } - #if Py_UNICODE_SIZE == 2 - else if (PyUnicode_GET_SIZE(x) == 2) { - Py_UCS4 high_val = PyUnicode_AS_UNICODE(x)[0]; - if (high_val >= 0xD800 && high_val <= 0xDBFF) { - Py_UCS4 low_val = PyUnicode_AS_UNICODE(x)[1]; - if (low_val >= 0xDC00 && low_val <= 0xDFFF) { - return 0x10000 + (((high_val & ((1<<10)-1)) << 10) | (low_val & ((1<<10)-1))); - } - } - } - #endif - #endif - PyErr_Format(PyExc_ValueError, - "only single character unicode strings can be converted to Py_UCS4, " - "got length %" CYTHON_FORMAT_SSIZE_T "d", length); - return (Py_UCS4)-1; -} - -/* CIntFromPy */ -static CYTHON_INLINE uint8_t __Pyx_PyInt_As_uint8_t(PyObject *x) { - const uint8_t neg_one = (uint8_t) ((uint8_t) 0 - (uint8_t) 1), const_zero = (uint8_t) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(uint8_t) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(uint8_t, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (uint8_t) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (uint8_t) 0; - case 1: __PYX_VERIFY_RETURN_INT(uint8_t, digit, digits[0]) - case 2: - if (8 * sizeof(uint8_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) >= 2 * PyLong_SHIFT) { - return (uint8_t) (((((uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0])); - } - } - break; - case 3: 
- if (8 * sizeof(uint8_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) >= 3 * PyLong_SHIFT) { - return (uint8_t) (((((((uint8_t)digits[2]) << PyLong_SHIFT) | (uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(uint8_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) >= 4 * PyLong_SHIFT) { - return (uint8_t) (((((((((uint8_t)digits[3]) << PyLong_SHIFT) | (uint8_t)digits[2]) << PyLong_SHIFT) | (uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (uint8_t) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(uint8_t) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(uint8_t, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint8_t) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(uint8_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (uint8_t) 0; - case -1: __PYX_VERIFY_RETURN_INT(uint8_t, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(uint8_t, digit, +digits[0]) - case -2: - if (8 * sizeof(uint8_t) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) - 1 > 2 * PyLong_SHIFT) { - return (uint8_t) (((uint8_t)-1)*(((((uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(uint8_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) - 1 > 2 * PyLong_SHIFT) { - return (uint8_t) ((((((uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(uint8_t) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) - 1 > 3 * PyLong_SHIFT) { - return (uint8_t) (((uint8_t)-1)*(((((((uint8_t)digits[2]) << PyLong_SHIFT) | (uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(uint8_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) - 1 > 3 * PyLong_SHIFT) { - return (uint8_t) ((((((((uint8_t)digits[2]) << PyLong_SHIFT) | (uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(uint8_t) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) - 1 > 4 * PyLong_SHIFT) { - return (uint8_t) (((uint8_t)-1)*(((((((((uint8_t)digits[3]) << PyLong_SHIFT) | (uint8_t)digits[2]) << PyLong_SHIFT) | (uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(uint8_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint8_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint8_t) - 1 > 4 * PyLong_SHIFT) { - return (uint8_t) ((((((((((uint8_t)digits[3]) << PyLong_SHIFT) | (uint8_t)digits[2]) << PyLong_SHIFT) | (uint8_t)digits[1]) << PyLong_SHIFT) | (uint8_t)digits[0]))); - } - } - break; - } -#endif - if (sizeof(uint8_t) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(uint8_t, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint8_t) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(uint8_t, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - uint8_t val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (uint8_t) -1; - } - } else { - uint8_t val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (uint8_t) -1; - val = __Pyx_PyInt_As_uint8_t(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to uint8_t"); - return (uint8_t) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to uint8_t"); - return (uint8_t) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE uint64_t __Pyx_PyInt_As_uint64_t(PyObject *x) { - const uint64_t neg_one = (uint64_t) ((uint64_t) 0 - (uint64_t) 1), const_zero = (uint64_t) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(uint64_t) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(uint64_t, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (uint64_t) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { 
- case 0: return (uint64_t) 0; - case 1: __PYX_VERIFY_RETURN_INT(uint64_t, digit, digits[0]) - case 2: - if (8 * sizeof(uint64_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) >= 2 * PyLong_SHIFT) { - return (uint64_t) (((((uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(uint64_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) >= 3 * PyLong_SHIFT) { - return (uint64_t) (((((((uint64_t)digits[2]) << PyLong_SHIFT) | (uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(uint64_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) >= 4 * PyLong_SHIFT) { - return (uint64_t) (((((((((uint64_t)digits[3]) << PyLong_SHIFT) | (uint64_t)digits[2]) << PyLong_SHIFT) | (uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (uint64_t) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(uint64_t) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(uint64_t, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint64_t) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(uint64_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (uint64_t) 0; - case -1: __PYX_VERIFY_RETURN_INT(uint64_t, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(uint64_t, digit, +digits[0]) - case -2: - if (8 * sizeof(uint64_t) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) - 1 > 2 * PyLong_SHIFT) { - return (uint64_t) (((uint64_t)-1)*(((((uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(uint64_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) - 1 > 2 * PyLong_SHIFT) { - return (uint64_t) ((((((uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(uint64_t) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | 
(unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) - 1 > 3 * PyLong_SHIFT) { - return (uint64_t) (((uint64_t)-1)*(((((((uint64_t)digits[2]) << PyLong_SHIFT) | (uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(uint64_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) - 1 > 3 * PyLong_SHIFT) { - return (uint64_t) ((((((((uint64_t)digits[2]) << PyLong_SHIFT) | (uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(uint64_t) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) - 1 > 4 * PyLong_SHIFT) { - return (uint64_t) (((uint64_t)-1)*(((((((((uint64_t)digits[3]) << PyLong_SHIFT) | (uint64_t)digits[2]) << PyLong_SHIFT) | (uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(uint64_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint64_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint64_t) - 1 > 4 * PyLong_SHIFT) { - return (uint64_t) ((((((((((uint64_t)digits[3]) << PyLong_SHIFT) | (uint64_t)digits[2]) << PyLong_SHIFT) | (uint64_t)digits[1]) << PyLong_SHIFT) | (uint64_t)digits[0]))); - } - } - break; - } -#endif - if (sizeof(uint64_t) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(uint64_t, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint64_t) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(uint64_t, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - uint64_t val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (uint64_t) -1; - } - } else { - uint64_t val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (uint64_t) -1; - val = __Pyx_PyInt_As_uint64_t(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to uint64_t"); - return (uint64_t) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to uint64_t"); - return (uint64_t) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - 
(long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - 
PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* ObjectAsUCS4 */ -static Py_UCS4 __Pyx__PyObject_AsPy_UCS4_raise_error(long ival) { - if (ival < 0) { - if (!PyErr_Occurred()) - PyErr_SetString(PyExc_OverflowError, - "cannot convert negative value to Py_UCS4"); - } else { - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to Py_UCS4"); - } - return (Py_UCS4)-1; -} -static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject* x) { - long ival; - ival = __Pyx_PyInt_As_long(x); - if (unlikely(!__Pyx_is_valid_index(ival, 1114111 + 1))) { - return __Pyx__PyObject_AsPy_UCS4_raise_error(ival); - } - return (Py_UCS4)ival; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return 
(int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp 
= v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; ip) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int 
__Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? 
digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/third_party/python/yarl/yarl/_quoting_c.pyi b/third_party/python/yarl/yarl/_quoting_c.pyi deleted file mode 100644 index 1c8fc24ec7ec..000000000000 --- a/third_party/python/yarl/yarl/_quoting_c.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Optional - -class _Quoter: - def __init__( - self, - *, - safe: str = ..., - protected: str = ..., - qs: bool = ..., - requote: bool = ... - ) -> None: ... - def __call__(self, val: Optional[str] = ...) -> Optional[str]: ... - -class _Unquoter: - def __init__(self, *, unsafe: str = ..., qs: bool = ...) -> None: ... - def __call__(self, val: Optional[str] = ...) -> Optional[str]: ... 
diff --git a/third_party/python/yarl/yarl/_quoting_c.pyx b/third_party/python/yarl/yarl/_quoting_c.pyx deleted file mode 100644 index 1b8bea25182a..000000000000 --- a/third_party/python/yarl/yarl/_quoting_c.pyx +++ /dev/null @@ -1,371 +0,0 @@ -# cython: language_level=3 - -from libc.stdint cimport uint8_t, uint64_t -from libc.string cimport memcpy, memset - -from cpython.exc cimport PyErr_NoMemory -from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free -from cpython.unicode cimport PyUnicode_DecodeASCII, PyUnicode_DecodeUTF8Stateful - -from string import ascii_letters, digits - -cdef str GEN_DELIMS = ":/?#[]@" -cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*," -cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;' -cdef str RESERVED = GEN_DELIMS + SUB_DELIMS -cdef str UNRESERVED = ascii_letters + digits + '-._~' -cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS -cdef str QS = '+&=;' - -DEF BUF_SIZE = 8 * 1024 # 8KiB -cdef char BUFFER[BUF_SIZE] - -cdef inline Py_UCS4 _to_hex(uint8_t v): - if v < 10: - return (v+0x30) # ord('0') == 0x30 - else: - return (v+0x41-10) # ord('A') == 0x41 - - -cdef inline int _from_hex(Py_UCS4 v): - if '0' <= v <= '9': - return (v) - 0x30 # ord('0') == 0x30 - elif 'A' <= v <= 'F': - return (v) - 0x41 + 10 # ord('A') == 0x41 - elif 'a' <= v <= 'f': - return (v) - 0x61 + 10 # ord('a') == 0x61 - else: - return -1 - - -cdef inline int _is_lower_hex(Py_UCS4 v): - return 'a' <= v <= 'f' - - -cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2): - cdef int digit1 = _from_hex(d1) - if digit1 < 0: - return -1 - cdef int digit2 = _from_hex(d2) - if digit2 < 0: - return -1 - return (digit1 << 4 | digit2) - - -cdef uint8_t ALLOWED_TABLE[16] -cdef uint8_t ALLOWED_NOTQS_TABLE[16] - - -cdef inline bint bit_at(uint8_t array[], uint64_t ch): - return array[ch >> 3] & (1 << (ch & 7)) - - -cdef inline void set_bit(uint8_t array[], uint64_t ch): - array[ch >> 3] |= (1 << (ch & 7)) - - -memset(ALLOWED_TABLE, 0, sizeof(ALLOWED_TABLE)) -memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE)) - -for i in range(128): - if chr(i) in ALLOWED: - set_bit(ALLOWED_TABLE, i) - set_bit(ALLOWED_NOTQS_TABLE, i) - if chr(i) in QS: - set_bit(ALLOWED_NOTQS_TABLE, i) - -# ----------------- writer --------------------------- - -cdef struct Writer: - char *buf - Py_ssize_t size - Py_ssize_t pos - bint changed - - -cdef inline void _init_writer(Writer* writer): - writer.buf = &BUFFER[0] - writer.size = BUF_SIZE - writer.pos = 0 - writer.changed = 0 - - -cdef inline void _release_writer(Writer* writer): - if writer.buf != BUFFER: - PyMem_Free(writer.buf) - - -cdef inline int _write_char(Writer* writer, Py_UCS4 ch, bint changed): - cdef char * buf - cdef Py_ssize_t size - - if writer.pos == writer.size: - # reallocate - size = writer.size + BUF_SIZE - if writer.buf == BUFFER: - buf = PyMem_Malloc(size) - if buf == NULL: - PyErr_NoMemory() - return -1 - memcpy(buf, writer.buf, writer.size) - else: - buf = PyMem_Realloc(writer.buf, size) - if buf == NULL: - PyErr_NoMemory() - return -1 - writer.buf = buf - writer.size = size - writer.buf[writer.pos] = ch - writer.pos += 1 - writer.changed |= changed - return 0 - - -cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed): - if _write_char(writer, '%', changed) < 0: - return -1 - if _write_char(writer, _to_hex(ch >> 4), changed) < 0: - return -1 - return _write_char(writer, _to_hex(ch & 0x0f), changed) - - -cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): - cdef uint64_t utf = symbol - - if utf < 0x80: - return 
_write_pct(writer, utf, True) - elif utf < 0x800: - if _write_pct(writer, (0xc0 | (utf >> 6)), True) < 0: - return -1 - return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - elif 0xD800 <= utf <= 0xDFFF: - # surogate pair, ignored - return 0 - elif utf < 0x10000: - if _write_pct(writer, (0xe0 | (utf >> 12)), True) < 0: - return -1 - if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - True) < 0: - return -1 - return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - elif utf > 0x10FFFF: - # symbol is too large - return 0 - else: - if _write_pct(writer, (0xf0 | (utf >> 18)), True) < 0: - return -1 - if _write_pct(writer, (0x80 | ((utf >> 12) & 0x3f)), - True) < 0: - return -1 - if _write_pct(writer, (0x80 | ((utf >> 6) & 0x3f)), - True) < 0: - return -1 - return _write_pct(writer, (0x80 | (utf & 0x3f)), True) - - -# --------------------- end writer -------------------------- - - -cdef class _Quoter: - cdef bint _qs - cdef bint _requote - - cdef uint8_t _safe_table[16] - cdef uint8_t _protected_table[16] - - def __init__( - self, *, str safe='', str protected='', bint qs=False, bint requote=True, - ): - cdef Py_UCS4 ch - - self._qs = qs - self._requote = requote - - if not self._qs: - memcpy(self._safe_table, - ALLOWED_NOTQS_TABLE, - sizeof(self._safe_table)) - else: - memcpy(self._safe_table, - ALLOWED_TABLE, - sizeof(self._safe_table)) - for ch in safe: - if ord(ch) > 127: - raise ValueError("Only safe symbols with ORD < 128 are allowed") - set_bit(self._safe_table, ch) - - memset(self._protected_table, 0, sizeof(self._protected_table)) - for ch in protected: - if ord(ch) > 127: - raise ValueError("Only safe symbols with ORD < 128 are allowed") - set_bit(self._safe_table, ch) - set_bit(self._protected_table, ch) - - def __call__(self, val): - cdef Writer writer - if val is None: - return None - if type(val) is not str: - if isinstance(val, str): - # derived from str - val = str(val) - else: - raise TypeError("Argument should be str") - _init_writer(&writer) - try: - return self._do_quote(val, &writer) - finally: - _release_writer(&writer) - - cdef str _do_quote(self, str val, Writer *writer): - cdef Py_UCS4 ch - cdef int changed - cdef int idx = 0 - cdef int length = len(val) - - while idx < length: - ch = val[idx] - idx += 1 - if ch == '%' and self._requote and idx <= length - 2: - ch = _restore_ch(val[idx], val[idx + 1]) - if ch != -1: - idx += 2 - if ch < 128: - if bit_at(self._protected_table, ch): - if _write_pct(writer, ch, True) < 0: - raise - continue - - if bit_at(self._safe_table, ch): - if _write_char(writer, ch, True) < 0: - raise - continue - - changed = (_is_lower_hex(val[idx - 2]) or - _is_lower_hex(val[idx - 1])) - if _write_pct(writer, ch, changed) < 0: - raise - continue - else: - ch = '%' - - if self._write(writer, ch) < 0: - raise - - if not writer.changed: - return val - else: - return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict") - - cdef inline int _write(self, Writer *writer, Py_UCS4 ch): - if self._qs: - if ch == ' ': - return _write_char(writer, '+', True) - - if ch < 128 and bit_at(self._safe_table, ch): - return _write_char(writer, ch, False) - - return _write_utf8(writer, ch) - - -cdef class _Unquoter: - cdef str _unsafe - cdef bint _qs - cdef _Quoter _quoter - cdef _Quoter _qs_quoter - - def __init__(self, *, unsafe='', qs=False): - self._unsafe = unsafe - self._qs = qs - self._quoter = _Quoter() - self._qs_quoter = _Quoter(qs=True) - - def __call__(self, val): - if val is None: - return None - if type(val) is not str: - if isinstance(val, str): - 
# derived from str - val = str(val) - else: - raise TypeError("Argument should be str") - return self._do_unquote(val) - - cdef str _do_unquote(self, str val): - if len(val) == 0: - return val - cdef list ret = [] - cdef char buffer[4] - cdef Py_ssize_t buflen = 0 - cdef Py_ssize_t consumed - cdef str unquoted - cdef Py_UCS4 ch = 0 - cdef Py_ssize_t idx = 0 - cdef Py_ssize_t length = len(val) - cdef Py_ssize_t start_pct - - while idx < length: - ch = val[idx] - idx += 1 - if ch == '%' and idx <= length - 2: - ch = _restore_ch(val[idx], val[idx + 1]) - if ch != -1: - idx += 2 - assert buflen < 4 - buffer[buflen] = ch - buflen += 1 - try: - unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - NULL, &consumed) - except UnicodeDecodeError: - start_pct = idx - buflen * 3 - buffer[0] = ch - buflen = 1 - ret.append(val[start_pct : idx - 3]) - try: - unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen, - NULL, &consumed) - except UnicodeDecodeError: - buflen = 0 - ret.append(val[idx - 3 : idx]) - continue - if not unquoted: - assert consumed == 0 - continue - assert consumed == buflen - buflen = 0 - if self._qs and unquoted in '+=&;': - ret.append(self._qs_quoter(unquoted)) - elif unquoted in self._unsafe: - ret.append(self._quoter(unquoted)) - else: - ret.append(unquoted) - continue - else: - ch = '%' - - if buflen: - start_pct = idx - 1 - buflen * 3 - ret.append(val[start_pct : idx - 1]) - buflen = 0 - - if ch == '+': - if not self._qs or ch in self._unsafe: - ret.append('+') - else: - ret.append(' ') - continue - - if ch in self._unsafe: - ret.append('%') - h = hex(ord(ch)).upper()[2:] - for ch in h: - ret.append(ch) - continue - - ret.append(ch) - - if buflen: - ret.append(val[length - buflen * 3 : length]) - - return ''.join(ret) diff --git a/third_party/python/yarl/yarl/_quoting_py.py b/third_party/python/yarl/yarl/_quoting_py.py deleted file mode 100644 index d6f33e15bdb7..000000000000 --- a/third_party/python/yarl/yarl/_quoting_py.py +++ /dev/null @@ -1,198 +0,0 @@ -import codecs -import re -from string import ascii_letters, ascii_lowercase, digits -from typing import Optional, cast - - -BASCII_LOWERCASE = ascii_lowercase.encode("ascii") -BPCT_ALLOWED = {"%{:02X}".format(i).encode("ascii") for i in range(256)} -GEN_DELIMS = ":/?#[]@" -SUB_DELIMS_WITHOUT_QS = "!$'()*," -SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + "+&=;" -RESERVED = GEN_DELIMS + SUB_DELIMS -UNRESERVED = ascii_letters + digits + "-._~" -ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS - - -_IS_HEX = re.compile(b"[A-Z0-9][A-Z0-9]") -_IS_HEX_STR = re.compile("[A-Fa-f0-9][A-Fa-f0-9]") - -utf8_decoder = codecs.getincrementaldecoder("utf-8") - - -class _Quoter: - def __init__( - self, - *, - safe: str = "", - protected: str = "", - qs: bool = False, - requote: bool = True - ) -> None: - self._safe = safe - self._protected = protected - self._qs = qs - self._requote = requote - - def __call__(self, val: Optional[str]) -> Optional[str]: - if val is None: - return None - if not isinstance(val, str): - raise TypeError("Argument should be str") - if not val: - return "" - bval = cast(str, val).encode("utf8", errors="ignore") - ret = bytearray() - pct = bytearray() - safe = self._safe - safe += ALLOWED - if not self._qs: - safe += "+&=;" - safe += self._protected - bsafe = safe.encode("ascii") - idx = 0 - while idx < len(bval): - ch = bval[idx] - idx += 1 - - if pct: - if ch in BASCII_LOWERCASE: - ch = ch - 32 # convert to uppercase - pct.append(ch) - if len(pct) == 3: # pragma: no branch # peephole optimizer - buf = pct[1:] - if not 
_IS_HEX.match(buf): - ret.extend(b"%25") - pct.clear() - idx -= 2 - continue - try: - unquoted = chr(int(pct[1:].decode("ascii"), base=16)) - except ValueError: - ret.extend(b"%25") - pct.clear() - idx -= 2 - continue - - if unquoted in self._protected: - ret.extend(pct) - elif unquoted in safe: - ret.append(ord(unquoted)) - else: - ret.extend(pct) - pct.clear() - - # special case, if we have only one char after "%" - elif len(pct) == 2 and idx == len(bval): - ret.extend(b"%25") - pct.clear() - idx -= 1 - - continue - - elif ch == ord("%") and self._requote: - pct.clear() - pct.append(ch) - - # special case if "%" is last char - if idx == len(bval): - ret.extend(b"%25") - - continue - - if self._qs: - if ch == ord(" "): - ret.append(ord("+")) - continue - if ch in bsafe: - ret.append(ch) - continue - - ret.extend(("%{:02X}".format(ch)).encode("ascii")) - - ret2 = ret.decode("ascii") - if ret2 == val: - return val - return ret2 - - -class _Unquoter: - def __init__(self, *, unsafe: str = "", qs: bool = False) -> None: - self._unsafe = unsafe - self._qs = qs - self._quoter = _Quoter() - self._qs_quoter = _Quoter(qs=True) - - def __call__(self, val: Optional[str]) -> Optional[str]: - if val is None: - return None - if not isinstance(val, str): - raise TypeError("Argument should be str") - if not val: - return "" - decoder = cast(codecs.BufferedIncrementalDecoder, utf8_decoder()) - ret = [] - idx = 0 - while idx < len(val): - ch = val[idx] - idx += 1 - if ch == "%" and idx <= len(val) - 2: - pct = val[idx : idx + 2] - if _IS_HEX_STR.fullmatch(pct): - b = bytes([int(pct, base=16)]) - idx += 2 - try: - unquoted = decoder.decode(b) - except UnicodeDecodeError: - start_pct = idx - 3 - len(decoder.buffer) * 3 - ret.append(val[start_pct : idx - 3]) - decoder.reset() - try: - unquoted = decoder.decode(b) - except UnicodeDecodeError: - ret.append(val[idx - 3 : idx]) - continue - if not unquoted: - continue - if self._qs and unquoted in "+=&;": - to_add = self._qs_quoter(unquoted) - if to_add is None: # pragma: no cover - raise RuntimeError("Cannot quote None") - ret.append(to_add) - elif unquoted in self._unsafe: - to_add = self._quoter(unquoted) - if to_add is None: # pragma: no cover - raise RuntimeError("Cannot quote None") - ret.append(to_add) - else: - ret.append(unquoted) - continue - - if decoder.buffer: - start_pct = idx - 1 - len(decoder.buffer) * 3 - ret.append(val[start_pct : idx - 1]) - decoder.reset() - - if ch == "+": - if not self._qs or ch in self._unsafe: - ret.append("+") - else: - ret.append(" ") - continue - - if ch in self._unsafe: - ret.append("%") - h = hex(ord(ch)).upper()[2:] - for ch in h: - ret.append(ch) - continue - - ret.append(ch) - - if decoder.buffer: - ret.append(val[-len(decoder.buffer) * 3 :]) - - ret2 = "".join(ret) - if ret2 == val: - return val - return ret2 diff --git a/third_party/python/yarl/yarl/_url.py b/third_party/python/yarl/yarl/_url.py deleted file mode 100644 index 99c424514a6d..000000000000 --- a/third_party/python/yarl/yarl/_url.py +++ /dev/null @@ -1,1144 +0,0 @@ -import functools -import sys -import warnings -from collections.abc import Mapping, Sequence -from ipaddress import ip_address -from urllib.parse import SplitResult, parse_qsl, urljoin, urlsplit, urlunsplit, quote - -from multidict import MultiDict, MultiDictProxy -import idna - -import math - - -from ._quoting import _Quoter, _Unquoter - - -DEFAULT_PORTS = {"http": 80, "https": 443, "ws": 80, "wss": 443} - -sentinel = object() - - -def rewrite_module(obj: object) -> object: - 
obj.__module__ = "yarl" - return obj - - -class cached_property: - """Use as a class method decorator. It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. - - """ - - def __init__(self, wrapped): - self.wrapped = wrapped - try: - self.__doc__ = wrapped.__doc__ - except AttributeError: # pragma: no cover - self.__doc__ = "" - self.name = wrapped.__name__ - - def __get__(self, inst, owner, _sentinel=sentinel): - if inst is None: - return self - val = inst._cache.get(self.name, _sentinel) - if val is not _sentinel: - return val - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - - def __set__(self, inst, value): - raise AttributeError("cached property is read-only") - - -@rewrite_module -class URL: - # Don't derive from str - # follow pathlib.Path design - # probably URL will not suffer from pathlib problems: - # it's intended for libraries like aiohttp, - # not to be passed into standard library functions like os.open etc. - - # URL grammar (RFC 3986) - # pct-encoded = "%" HEXDIG HEXDIG - # reserved = gen-delims / sub-delims - # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" - # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" - # / "*" / "+" / "," / ";" / "=" - # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" - # URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ] - # hier-part = "//" authority path-abempty - # / path-absolute - # / path-rootless - # / path-empty - # scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) - # authority = [ userinfo "@" ] host [ ":" port ] - # userinfo = *( unreserved / pct-encoded / sub-delims / ":" ) - # host = IP-literal / IPv4address / reg-name - # IP-literal = "[" ( IPv6address / IPvFuture ) "]" - # IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" ) - # IPv6address = 6( h16 ":" ) ls32 - # / "::" 5( h16 ":" ) ls32 - # / [ h16 ] "::" 4( h16 ":" ) ls32 - # / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 - # / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 - # / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 - # / [ *4( h16 ":" ) h16 ] "::" ls32 - # / [ *5( h16 ":" ) h16 ] "::" h16 - # / [ *6( h16 ":" ) h16 ] "::" - # ls32 = ( h16 ":" h16 ) / IPv4address - # ; least-significant 32 bits of address - # h16 = 1*4HEXDIG - # ; 16 bits of address represented in hexadecimal - # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet - # dec-octet = DIGIT ; 0-9 - # / %x31-39 DIGIT ; 10-99 - # / "1" 2DIGIT ; 100-199 - # / "2" %x30-34 DIGIT ; 200-249 - # / "25" %x30-35 ; 250-255 - # reg-name = *( unreserved / pct-encoded / sub-delims ) - # port = *DIGIT - # path = path-abempty ; begins with "/" or is empty - # / path-absolute ; begins with "/" but not "//" - # / path-noscheme ; begins with a non-colon segment - # / path-rootless ; begins with a segment - # / path-empty ; zero characters - # path-abempty = *( "/" segment ) - # path-absolute = "/" [ segment-nz *( "/" segment ) ] - # path-noscheme = segment-nz-nc *( "/" segment ) - # path-rootless = segment-nz *( "/" segment ) - # path-empty = 0 - # segment = *pchar - # segment-nz = 1*pchar - # segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" ) - # ; non-zero-length segment without any colon ":" - # pchar = unreserved / pct-encoded / sub-delims / ":" / "@" - # query = *( pchar / "/" / "?" ) - # fragment = *( pchar / "/" / "?" 
) - # URI-reference = URI / relative-ref - # relative-ref = relative-part [ "?" query ] [ "#" fragment ] - # relative-part = "//" authority path-abempty - # / path-absolute - # / path-noscheme - # / path-empty - # absolute-URI = scheme ":" hier-part [ "?" query ] - __slots__ = ("_cache", "_val") - - _QUOTER = _Quoter(requote=False) - _REQUOTER = _Quoter() - _PATH_QUOTER = _Quoter(safe="@:", protected="/+", requote=False) - _PATH_REQUOTER = _Quoter(safe="@:", protected="/+") - _QUERY_QUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True, requote=False) - _QUERY_REQUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True) - _QUERY_PART_QUOTER = _Quoter(safe="?/:@", qs=True, requote=False) - _FRAGMENT_QUOTER = _Quoter(safe="?/:@", requote=False) - _FRAGMENT_REQUOTER = _Quoter(safe="?/:@") - - _UNQUOTER = _Unquoter() - _PATH_UNQUOTER = _Unquoter(unsafe="+") - _QS_UNQUOTER = _Unquoter(qs=True) - - def __new__(cls, val="", *, encoded=False, strict=None): - if strict is not None: # pragma: no cover - warnings.warn("strict parameter is ignored") - if type(val) is cls: - return val - if type(val) is str: - val = urlsplit(val) - elif type(val) is SplitResult: - if not encoded: - raise ValueError("Cannot apply decoding to SplitResult") - elif isinstance(val, str): - val = urlsplit(str(val)) - else: - raise TypeError("Constructor parameter should be str") - - if not encoded: - if not val[1]: # netloc - netloc = "" - host = "" - else: - host = val.hostname - if host is None: - raise ValueError("Invalid URL: host is required for absolute urls") - - try: - port = val.port - except ValueError as e: - raise ValueError( - "Invalid URL: port can't be converted to integer" - ) from e - - netloc = cls._make_netloc( - val.username, val.password, host, port, encode=True, requote=True - ) - path = cls._PATH_REQUOTER(val[2]) - if netloc: - path = cls._normalize_path(path) - - cls._validate_authority_uri_abs_path(host=host, path=path) - query = cls._QUERY_REQUOTER(val[3]) - fragment = cls._FRAGMENT_REQUOTER(val[4]) - val = SplitResult(val[0], netloc, path, query, fragment) - - self = object.__new__(cls) - self._val = val - self._cache = {} - return self - - @classmethod - def build( - cls, - *, - scheme="", - authority="", - user=None, - password=None, - host="", - port=None, - path="", - query=None, - query_string="", - fragment="", - encoded=False - ): - """Creates and returns a new URL""" - - if authority and (user or password or host or port): - raise ValueError( - 'Can\'t mix "authority" with "user", "password", "host" or "port".' - ) - if port and not host: - raise ValueError('Can\'t build URL with "port" but without "host".') - if query and query_string: - raise ValueError('Only one of "query" or "query_string" should be passed') - if ( - scheme is None - or authority is None - or path is None - or query_string is None - or fragment is None - ): - raise TypeError( - 'NoneType is illegal for "scheme", "authority", "path", ' - '"query_string", and "fragment" args, use empty string instead.' 
- ) - - if authority: - if encoded: - netloc = authority - else: - tmp = SplitResult("", authority, "", "", "") - netloc = cls._make_netloc( - tmp.username, tmp.password, tmp.hostname, tmp.port, encode=True - ) - elif not user and not password and not host and not port: - netloc = "" - else: - netloc = cls._make_netloc( - user, password, host, port, encode=not encoded, encode_host=not encoded - ) - if not encoded: - path = cls._PATH_QUOTER(path) - if netloc: - path = cls._normalize_path(path) - - cls._validate_authority_uri_abs_path(host=host, path=path) - query_string = cls._QUERY_QUOTER(query_string) - fragment = cls._FRAGMENT_QUOTER(fragment) - - url = cls( - SplitResult(scheme, netloc, path, query_string, fragment), encoded=True - ) - - if query: - return url.with_query(query) - else: - return url - - def __init_subclass__(cls): - raise TypeError("Inheritance a class {!r} from URL is forbidden".format(cls)) - - def __str__(self): - val = self._val - if not val.path and self.is_absolute() and (val.query or val.fragment): - val = val._replace(path="/") - return urlunsplit(val) - - def __repr__(self): - return "{}('{}')".format(self.__class__.__name__, str(self)) - - def __eq__(self, other): - if not type(other) is URL: - return NotImplemented - - val1 = self._val - if not val1.path and self.is_absolute(): - val1 = val1._replace(path="/") - - val2 = other._val - if not val2.path and other.is_absolute(): - val2 = val2._replace(path="/") - - return val1 == val2 - - def __hash__(self): - ret = self._cache.get("hash") - if ret is None: - val = self._val - if not val.path and self.is_absolute(): - val = val._replace(path="/") - ret = self._cache["hash"] = hash(val) - return ret - - def __le__(self, other): - if not type(other) is URL: - return NotImplemented - return self._val <= other._val - - def __lt__(self, other): - if not type(other) is URL: - return NotImplemented - return self._val < other._val - - def __ge__(self, other): - if not type(other) is URL: - return NotImplemented - return self._val >= other._val - - def __gt__(self, other): - if not type(other) is URL: - return NotImplemented - return self._val > other._val - - def __truediv__(self, name): - name = self._PATH_QUOTER(name) - if name.startswith("/"): - raise ValueError( - "Appending path {!r} starting from slash is forbidden".format(name) - ) - path = self._val.path - if path == "/": - new_path = "/" + name - elif not path and not self.is_absolute(): - new_path = name - else: - parts = path.rstrip("/").split("/") - parts.append(name) - new_path = "/".join(parts) - if self.is_absolute(): - new_path = self._normalize_path(new_path) - return URL( - self._val._replace(path=new_path, query="", fragment=""), encoded=True - ) - - def __mod__(self, query): - return self.update_query(query) - - def __bool__(self) -> bool: - return bool( - self._val.netloc or self._val.path or self._val.query or self._val.fragment - ) - - def __getstate__(self): - return (self._val,) - - def __setstate__(self, state): - if state[0] is None and isinstance(state[1], dict): - # default style pickle - self._val = state[1]["_val"] - else: - self._val, *unused = state - self._cache = {} - - def is_absolute(self): - """A check for absolute URLs. - - Return True for absolute ones (having scheme or starting - with //), False otherwise. - - """ - return self.raw_host is not None - - def is_default_port(self): - """A check for default port. - - Return True if port is default for specified scheme, - e.g. 
'http://python.org' or 'http://python.org:80', False - otherwise. - - """ - if self.port is None: - return False - default = DEFAULT_PORTS.get(self.scheme) - if default is None: - return False - return self.port == default - - def origin(self): - """Return an URL with scheme, host and port parts only. - - user, password, path, query and fragment are removed. - - """ - # TODO: add a keyword-only option for keeping user/pass maybe? - if not self.is_absolute(): - raise ValueError("URL should be absolute") - if not self._val.scheme: - raise ValueError("URL should have scheme") - v = self._val - netloc = self._make_netloc(None, None, v.hostname, v.port) - val = v._replace(netloc=netloc, path="", query="", fragment="") - return URL(val, encoded=True) - - def relative(self): - """Return a relative part of the URL. - - scheme, user, password, host and port are removed. - - """ - if not self.is_absolute(): - raise ValueError("URL should be absolute") - val = self._val._replace(scheme="", netloc="") - return URL(val, encoded=True) - - @property - def scheme(self): - """Scheme for absolute URLs. - - Empty string for relative URLs or URLs starting with // - - """ - return self._val.scheme - - @property - def raw_authority(self): - """Encoded authority part of URL. - - Empty string for relative URLs. - - """ - return self._val.netloc - - @cached_property - def authority(self): - """Decoded authority part of URL. - - Empty string for relative URLs. - - """ - return self._make_netloc( - self.user, self.password, self.host, self.port, encode_host=False - ) - - @property - def raw_user(self): - """Encoded user part of URL. - - None if user is missing. - - """ - # not .username - ret = self._val.username - if not ret: - return None - return ret - - @cached_property - def user(self): - """Decoded user part of URL. - - None if user is missing. - - """ - return self._UNQUOTER(self.raw_user) - - @property - def raw_password(self): - """Encoded password part of URL. - - None if password is missing. - - """ - return self._val.password - - @cached_property - def password(self): - """Decoded password part of URL. - - None if password is missing. - - """ - return self._UNQUOTER(self.raw_password) - - @property - def raw_host(self): - """Encoded host part of URL. - - None for relative URLs. - - """ - # Use host instead of hostname for sake of shortness - # May add .hostname prop later - return self._val.hostname - - @cached_property - def host(self): - """Decoded host part of URL. - - None for relative URLs. - - """ - raw = self.raw_host - if raw is None: - return None - if "%" in raw: - # Hack for scoped IPv6 addresses like - # fe80::2%Проверка - # presence of '%' sign means only IPv6 address, so idna is useless. - return raw - return _idna_decode(raw) - - @property - def port(self): - """Port part of URL, with scheme-based fallback. - - None for relative URLs or URLs without explicit port and - scheme without default port substitution. - - """ - return self._val.port or DEFAULT_PORTS.get(self._val.scheme) - - @property - def explicit_port(self): - """Port part of URL, without scheme-based fallback. - - None for relative URLs or URLs without explicit port. - - """ - return self._val.port - - @property - def raw_path(self): - """Encoded path of URL. - - / for absolute URLs without path part. - - """ - ret = self._val.path - if not ret and self.is_absolute(): - ret = "/" - return ret - - @cached_property - def path(self): - """Decoded path of URL. - - / for absolute URLs without path part. 
- - """ - return self._PATH_UNQUOTER(self.raw_path) - - @cached_property - def query(self): - """A MultiDictProxy representing parsed query parameters in decoded - representation. - - Empty value if URL has no query part. - - """ - ret = MultiDict(parse_qsl(self.raw_query_string, keep_blank_values=True)) - return MultiDictProxy(ret) - - @property - def raw_query_string(self): - """Encoded query part of URL. - - Empty string if query is missing. - - """ - return self._val.query - - @cached_property - def query_string(self): - """Decoded query part of URL. - - Empty string if query is missing. - - """ - return self._QS_UNQUOTER(self.raw_query_string) - - @cached_property - def path_qs(self): - """Decoded path of URL with query.""" - if not self.query_string: - return self.path - return "{}?{}".format(self.path, self.query_string) - - @cached_property - def raw_path_qs(self): - """Encoded path of URL with query.""" - if not self.raw_query_string: - return self.raw_path - return "{}?{}".format(self.raw_path, self.raw_query_string) - - @property - def raw_fragment(self): - """Encoded fragment part of URL. - - Empty string if fragment is missing. - - """ - return self._val.fragment - - @cached_property - def fragment(self): - """Decoded fragment part of URL. - - Empty string if fragment is missing. - - """ - return self._UNQUOTER(self.raw_fragment) - - @cached_property - def raw_parts(self): - """A tuple containing encoded *path* parts. - - ('/',) for absolute URLs if *path* is missing. - - """ - path = self._val.path - if self.is_absolute(): - if not path: - parts = ["/"] - else: - parts = ["/"] + path[1:].split("/") - else: - if path.startswith("/"): - parts = ["/"] + path[1:].split("/") - else: - parts = path.split("/") - return tuple(parts) - - @cached_property - def parts(self): - """A tuple containing decoded *path* parts. - - ('/',) for absolute URLs if *path* is missing. - - """ - return tuple(self._UNQUOTER(part) for part in self.raw_parts) - - @cached_property - def parent(self): - """A new URL with last part of path removed and cleaned up query and - fragment. - - """ - path = self.raw_path - if not path or path == "/": - if self.raw_fragment or self.raw_query_string: - return URL(self._val._replace(query="", fragment=""), encoded=True) - return self - parts = path.split("/") - val = self._val._replace(path="/".join(parts[:-1]), query="", fragment="") - return URL(val, encoded=True) - - @cached_property - def raw_name(self): - """The last part of raw_parts.""" - parts = self.raw_parts - if self.is_absolute(): - parts = parts[1:] - if not parts: - return "" - else: - return parts[-1] - else: - return parts[-1] - - @cached_property - def name(self): - """The last part of parts.""" - return self._UNQUOTER(self.raw_name) - - @staticmethod - def _validate_authority_uri_abs_path(host, path): - """Ensure that path in URL with authority starts with a leading slash. - - Raise ValueError if not. - """ - if len(host) > 0 and len(path) > 0 and not path.startswith("/"): - raise ValueError( - "Path in a URL with authority should start with a slash ('/') if set" - ) - - @classmethod - def _normalize_path(cls, path): - # Drop '.' and '..' from path - - segments = path.split("/") - resolved_path = [] - - for seg in segments: - if seg == "..": - try: - resolved_path.pop() - except IndexError: - # ignore any .. 
segments that would otherwise cause an - # IndexError when popped from resolved_path if - # resolving for rfc3986 - pass - elif seg == ".": - continue - else: - resolved_path.append(seg) - - if segments[-1] in (".", ".."): - # do some post-processing here. - # if the last segment was a relative dir, - # then we need to append the trailing '/' - resolved_path.append("") - - return "/".join(resolved_path) - - if sys.version_info >= (3, 7): - - @classmethod - def _encode_host(cls, host, human=False): - try: - ip, sep, zone = host.partition("%") - ip = ip_address(ip) - except ValueError: - host = host.lower() - # IDNA encoding is slow, - # skip it for ASCII-only strings - # Don't move the check into _idna_encode() helper - # to reduce the cache size - if human or host.isascii(): - return host - host = _idna_encode(host) - else: - host = ip.compressed - if sep: - host += "%" + zone - if ip.version == 6: - host = "[" + host + "]" - return host - - else: - # work around for missing str.isascii() in Python <= 3.6 - @classmethod - def _encode_host(cls, host, human=False): - try: - ip, sep, zone = host.partition("%") - ip = ip_address(ip) - except ValueError: - host = host.lower() - if human: - return host - - for char in host: - if char > "\x7f": - break - else: - return host - host = _idna_encode(host) - else: - host = ip.compressed - if sep: - host += "%" + zone - if ip.version == 6: - host = "[" + host + "]" - return host - - @classmethod - def _make_netloc( - cls, user, password, host, port, encode=False, encode_host=True, requote=False - ): - quoter = cls._REQUOTER if requote else cls._QUOTER - if encode_host: - ret = cls._encode_host(host) - else: - ret = host - if port: - ret = ret + ":" + str(port) - if password is not None: - if not user: - user = "" - else: - if encode: - user = quoter(user) - if encode: - password = quoter(password) - user = user + ":" + password - elif user and encode: - user = quoter(user) - if user: - ret = user + "@" + ret - return ret - - def with_scheme(self, scheme): - """Return a new URL with scheme replaced.""" - # N.B. doesn't cleanup query/fragment - if not isinstance(scheme, str): - raise TypeError("Invalid scheme type") - if not self.is_absolute(): - raise ValueError("scheme replacement is not allowed for relative URLs") - return URL(self._val._replace(scheme=scheme.lower()), encoded=True) - - def with_user(self, user): - """Return a new URL with user replaced. - - Autoencode user if needed. - - Clear user/password if user is None. - - """ - # N.B. doesn't cleanup query/fragment - val = self._val - if user is None: - password = None - elif isinstance(user, str): - user = self._QUOTER(user) - password = val.password - else: - raise TypeError("Invalid user type") - if not self.is_absolute(): - raise ValueError("user replacement is not allowed for relative URLs") - return URL( - self._val._replace( - netloc=self._make_netloc(user, password, val.hostname, val.port) - ), - encoded=True, - ) - - def with_password(self, password): - """Return a new URL with password replaced. - - Autoencode password if needed. - - Clear password if argument is None. - - """ - # N.B. 
doesn't cleanup query/fragment - if password is None: - pass - elif isinstance(password, str): - password = self._QUOTER(password) - else: - raise TypeError("Invalid password type") - if not self.is_absolute(): - raise ValueError("password replacement is not allowed for relative URLs") - val = self._val - return URL( - self._val._replace( - netloc=self._make_netloc(val.username, password, val.hostname, val.port) - ), - encoded=True, - ) - - def with_host(self, host): - """Return a new URL with host replaced. - - Autoencode host if needed. - - Changing host for relative URLs is not allowed, use .join() - instead. - - """ - # N.B. doesn't cleanup query/fragment - if not isinstance(host, str): - raise TypeError("Invalid host type") - if not self.is_absolute(): - raise ValueError("host replacement is not allowed for relative URLs") - if not host: - raise ValueError("host removing is not allowed") - val = self._val - return URL( - self._val._replace( - netloc=self._make_netloc(val.username, val.password, host, val.port) - ), - encoded=True, - ) - - def with_port(self, port): - """Return a new URL with port replaced. - - Clear port to default if None is passed. - - """ - # N.B. doesn't cleanup query/fragment - if port is not None and not isinstance(port, int): - raise TypeError("port should be int or None, got {}".format(type(port))) - if not self.is_absolute(): - raise ValueError("port replacement is not allowed for relative URLs") - val = self._val - return URL( - self._val._replace( - netloc=self._make_netloc( - val.username, val.password, val.hostname, port, encode=True - ) - ), - encoded=True, - ) - - def with_path(self, path, *, encoded=False): - """Return a new URL with path replaced.""" - if not encoded: - path = self._PATH_QUOTER(path) - if self.is_absolute(): - path = self._normalize_path(path) - if len(path) > 0 and path[0] != "/": - path = "/" + path - return URL(self._val._replace(path=path, query="", fragment=""), encoded=True) - - @classmethod - def _query_seq_pairs(cls, quoter, pairs): - for key, val in pairs: - if isinstance(val, (list, tuple)): - for v in val: - yield quoter(key) + "=" + quoter(cls._query_var(v)) - else: - yield quoter(key) + "=" + quoter(cls._query_var(val)) - - @staticmethod - def _query_var(v): - cls = type(v) - if issubclass(cls, str): - return v - if issubclass(cls, float): - if math.isinf(v): - raise ValueError("float('inf') is not supported") - if math.isnan(v): - raise ValueError("float('nan') is not supported") - return str(float(v)) - if issubclass(cls, int) and cls is not bool: - return str(int(v)) - raise TypeError( - "Invalid variable type: value " - "should be str, int or float, got {!r} " - "of type {}".format(v, cls) - ) - - def _get_str_query(self, *args, **kwargs): - if kwargs: - if len(args) > 0: - raise ValueError( - "Either kwargs or single query parameter must be present" - ) - query = kwargs - elif len(args) == 1: - query = args[0] - else: - raise ValueError("Either kwargs or single query parameter must be present") - - if query is None: - query = "" - elif isinstance(query, Mapping): - quoter = self._QUERY_PART_QUOTER - query = "&".join(self._query_seq_pairs(quoter, query.items())) - elif isinstance(query, str): - query = self._QUERY_QUOTER(query) - elif isinstance(query, (bytes, bytearray, memoryview)): - raise TypeError( - "Invalid query type: bytes, bytearray and memoryview are forbidden" - ) - elif isinstance(query, Sequence): - quoter = self._QUERY_PART_QUOTER - # We don't expect sequence values if we're given a list of pairs - # 
already; only mappings like builtin `dict` which can't have the - # same key pointing to multiple values are allowed to use - # `_query_seq_pairs`. - query = "&".join( - quoter(k) + "=" + quoter(self._query_var(v)) for k, v in query - ) - else: - raise TypeError( - "Invalid query type: only str, mapping or " - "sequence of (key, value) pairs is allowed" - ) - - return query - - def with_query(self, *args, **kwargs): - """Return a new URL with query part replaced. - - Accepts any Mapping (e.g. dict, multidict.MultiDict instances) - or str, autoencode the argument if needed. - - A sequence of (key, value) pairs is supported as well. - - It also can take an arbitrary number of keyword arguments. - - Clear query if None is passed. - - """ - # N.B. doesn't cleanup query/fragment - - new_query = self._get_str_query(*args, **kwargs) - return URL( - self._val._replace(path=self._val.path, query=new_query), encoded=True - ) - - def update_query(self, *args, **kwargs): - """Return a new URL with query part updated.""" - s = self._get_str_query(*args, **kwargs) - new_query = MultiDict(parse_qsl(s, keep_blank_values=True)) - query = MultiDict(self.query) - query.update(new_query) - - return URL(self._val._replace(query=self._get_str_query(query)), encoded=True) - - def with_fragment(self, fragment): - """Return a new URL with fragment replaced. - - Autoencode fragment if needed. - - Clear fragment to default if None is passed. - - """ - # N.B. doesn't cleanup query/fragment - if fragment is None: - raw_fragment = "" - elif not isinstance(fragment, str): - raise TypeError("Invalid fragment type") - else: - raw_fragment = self._FRAGMENT_QUOTER(fragment) - if self.raw_fragment == raw_fragment: - return self - return URL(self._val._replace(fragment=raw_fragment), encoded=True) - - def with_name(self, name): - """Return a new URL with name (last part of path) replaced. - - Query and fragment parts are cleaned up. - - Name is encoded if needed. - - """ - # N.B. DOES cleanup query/fragment - if not isinstance(name, str): - raise TypeError("Invalid name type") - if "/" in name: - raise ValueError("Slash in name is not allowed") - name = self._PATH_QUOTER(name) - if name in (".", ".."): - raise ValueError(". and .. values are forbidden") - parts = list(self.raw_parts) - if self.is_absolute(): - if len(parts) == 1: - parts.append(name) - else: - parts[-1] = name - parts[0] = "" # replace leading '/' - else: - parts[-1] = name - if parts[0] == "/": - parts[0] = "" # replace leading '/' - return URL( - self._val._replace(path="/".join(parts), query="", fragment=""), - encoded=True, - ) - - def join(self, url): - """Join URLs - - Construct a full (“absolute”) URL by combining a “base URL” - (self) with another URL (url). - - Informally, this uses components of the base URL, in - particular the addressing scheme, the network location and - (part of) the path, to provide missing components in the - relative URL. 
- - """ - # See docs for urllib.parse.urljoin - if not isinstance(url, URL): - raise TypeError("url should be URL") - return URL(urljoin(str(self), str(url)), encoded=True) - - def human_repr(self): - """Return decoded human readable string for URL representation.""" - user = _human_quote(self.user, "#/:?@") - password = _human_quote(self.password, "#/:?@") - host = self.host - if host: - host = self._encode_host(self.host, human=True) - path = _human_quote(self.path, "#?") - query_string = "&".join( - "{}={}".format(_human_quote(k, "#&+;="), _human_quote(v, "#&+;=")) - for k, v in self.query.items() - ) - fragment = _human_quote(self.fragment, "") - return urlunsplit( - SplitResult( - self.scheme, - self._make_netloc( - user, - password, - host, - self._val.port, - encode_host=False, - ), - path, - query_string, - fragment, - ) - ) - - -def _human_quote(s, unsafe): - if not s: - return s - for c in "%" + unsafe: - if c in s: - s = s.replace(c, "%{:02X}".format(ord(c))) - if s.isprintable(): - return s - return "".join(c if c.isprintable() else quote(c) for c in s) - - -_MAXCACHE = 256 - - -@functools.lru_cache(_MAXCACHE) -def _idna_decode(raw): - try: - return idna.decode(raw.encode("ascii")) - except UnicodeError: # e.g. '::1' - return raw.encode("ascii").decode("idna") - - -@functools.lru_cache(_MAXCACHE) -def _idna_encode(host): - try: - return idna.encode(host, uts46=True).decode("ascii") - except UnicodeError: - return host.encode("idna").decode("ascii") - - -@rewrite_module -def cache_clear(): - _idna_decode.cache_clear() - _idna_encode.cache_clear() - - -@rewrite_module -def cache_info(): - return { - "idna_encode": _idna_encode.cache_info(), - "idna_decode": _idna_decode.cache_info(), - } - - -@rewrite_module -def cache_configure(*, idna_encode_size=_MAXCACHE, idna_decode_size=_MAXCACHE): - global _idna_decode, _idna_encode - - _idna_encode = functools.lru_cache(idna_encode_size)(_idna_encode.__wrapped__) - _idna_decode = functools.lru_cache(idna_decode_size)(_idna_decode.__wrapped__) diff --git a/third_party/python/yarl/yarl/py.typed b/third_party/python/yarl/yarl/py.typed deleted file mode 100644 index 867e2c849295..000000000000 --- a/third_party/python/yarl/yarl/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Placeholder \ No newline at end of file