Mirror of https://github.com/hypervortex/VH-Bombsquad-Modded-Server-Files, synced 2025-11-07 17:36:08 +00:00
Added new files
parent 867634cc5c
commit 3a407868d4
1775 changed files with 550222 additions and 0 deletions
BIN
dist/ba_data/python-site-packages/__pycache__/filelock.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/__pycache__/typing_extensions.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/_cffi_backend.cpython-310-aarch64-linux-gnu.so
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so
vendored
Normal file
Binary file not shown.
33
dist/ba_data/python-site-packages/_yaml/__init__.py
vendored
Normal file
@@ -0,0 +1,33 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml

# in some circumstances, the yaml module we imported may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
    from sys import version_info

    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
    raise exc("No module named '_yaml'")
else:
    from yaml._yaml import *
    import warnings
    warnings.warn(
        'The _yaml extension module is now located at yaml._yaml'
        ' and its location is subject to change. To use the'
        ' LibYAML-based parser and emitter, import from `yaml`:'
        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
        DeprecationWarning
    )
    del warnings
    # Don't `del yaml` here because yaml is actually an existing
    # namespace member of _yaml.

__name__ = '_yaml'
# If the module is top-level (i.e. not a part of any specific package)
# then the attribute should be set to ''.
# https://docs.python.org/3.8/library/types.html
__package__ = ''
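
The stub's deprecation warning points at the supported import style. A minimal sketch of that style (not part of this commit; assumes PyYAML is installed, with a pure-Python fallback in case it was built without LibYAML):

import yaml

try:
    # LibYAML-backed classes, as the warning above recommends
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    # pure-Python fallback when PyYAML was built without LibYAML
    from yaml import Loader, Dumper

data = yaml.load("greeting: hello", Loader=Loader)
assert data == {"greeting": "hello"}
print(yaml.dump(data, Dumper=Dumper))
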
BIN
dist/ba_data/python-site-packages/_yaml/__pycache__/__init__.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
1
dist/ba_data/python-site-packages/aiohttp/.hash/_cparser.pxd.hash
vendored
Normal file
@@ -0,0 +1 @@
c6fb0b975dd95d7c871b26f652ced6b0b9dc9dd42bc61c860782979ef6ec46d4 *D:/a/aiohttp/aiohttp/aiohttp/_cparser.pxd
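
Each of these `.hash` files holds one `sha256sum`-style binary-mode entry, `<digest> *<path>`; the `D:/a/aiohttp/...` prefix is apparently the Windows CI workspace where the wheel was built. A minimal sketch (not part of aiohttp; the `verify` helper is hypothetical) of checking such an entry against a local copy of the file:

import hashlib

def verify(hash_line: str, local_path: str) -> bool:
    # hypothetical helper: the hex digest comes before the "*<path>" field
    expected = hash_line.split(None, 1)[0]
    h = hashlib.sha256()
    with open(local_path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)
    return h.hexdigest() == expected

# e.g. verify(open(".hash/_cparser.pxd.hash").read(), "_cparser.pxd")
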
1
dist/ba_data/python-site-packages/aiohttp/.hash/_find_header.pxd.hash
vendored
Normal file
@@ -0,0 +1 @@
0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 *D:/a/aiohttp/aiohttp/aiohttp/_find_header.pxd
1
dist/ba_data/python-site-packages/aiohttp/.hash/_frozenlist.pyx.hash
vendored
Normal file
@@ -0,0 +1 @@
481f39d4a9ad5a9889d99074e53a68e3ce795640b246d80cb3ce375b3f2415e8 *D:/a/aiohttp/aiohttp/aiohttp/_frozenlist.pyx
1
dist/ba_data/python-site-packages/aiohttp/.hash/_helpers.pyi.hash
vendored
Normal file
@@ -0,0 +1 @@
d87779202d197f8613109e35dacbb2ca1b21d64572543bf9838b2d832a362ac7 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyi
1
dist/ba_data/python-site-packages/aiohttp/.hash/_helpers.pyx.hash
vendored
Normal file
@@ -0,0 +1 @@
b6097b7d987440c4fa7237f88d227c89a3ba0dd403dc638ddbe487e0de7f1138 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyx
1
dist/ba_data/python-site-packages/aiohttp/.hash/_http_parser.pyx.hash
vendored
Normal file
@@ -0,0 +1 @@
83c05185224ad57f133f7fd5d56c331f4f9e101cd52f91237e7b6568b5459e1c *D:/a/aiohttp/aiohttp/aiohttp/_http_parser.pyx
1
dist/ba_data/python-site-packages/aiohttp/.hash/_http_writer.pyx.hash
vendored
Normal file
@@ -0,0 +1 @@
ac1cdb93ec6b2163b6843d242a8e482ca48ab16fd4f177f5e4800f9ea487db74 *D:/a/aiohttp/aiohttp/aiohttp/_http_writer.pyx
1
dist/ba_data/python-site-packages/aiohttp/.hash/_websocket.pyx.hash
vendored
Normal file
@@ -0,0 +1 @@
a3d27bca2f5cdbe8d3063137754917c610d62af456273e4665fc8bb202506b7f *D:/a/aiohttp/aiohttp/aiohttp/_websocket.pyx
1
dist/ba_data/python-site-packages/aiohttp/.hash/frozenlist.pyi.hash
vendored
Normal file
@@ -0,0 +1 @@
9011bd27ad72982aa252f064ae3b1119599f6a49a4ce4e8a1e665b76044b0996 *D:/a/aiohttp/aiohttp/aiohttp/frozenlist.pyi
1
dist/ba_data/python-site-packages/aiohttp/.hash/hdrs.py.hash
vendored
Normal file
@@ -0,0 +1 @@
5f2bdc50368865ef87528ae8fd6820c8b35677209c5430b669c8857abedb9e94 *D:/a/aiohttp/aiohttp/aiohttp/hdrs.py
1
dist/ba_data/python-site-packages/aiohttp/.hash/signals.pyi.hash
vendored
Normal file
@@ -0,0 +1 @@
1273686ce37b6b3150d5f1d28a496f6ebbd07d05273993e2de7ffaa4a1335e83 *D:/a/aiohttp/aiohttp/aiohttp/signals.pyi
217
dist/ba_data/python-site-packages/aiohttp/__init__.py
vendored
Normal file
@@ -0,0 +1,217 @@
__version__ = "3.7.4.post0"

from typing import Tuple

from . import hdrs as hdrs
from .client import (
    BaseConnector as BaseConnector,
    ClientConnectionError as ClientConnectionError,
    ClientConnectorCertificateError as ClientConnectorCertificateError,
    ClientConnectorError as ClientConnectorError,
    ClientConnectorSSLError as ClientConnectorSSLError,
    ClientError as ClientError,
    ClientHttpProxyError as ClientHttpProxyError,
    ClientOSError as ClientOSError,
    ClientPayloadError as ClientPayloadError,
    ClientProxyConnectionError as ClientProxyConnectionError,
    ClientRequest as ClientRequest,
    ClientResponse as ClientResponse,
    ClientResponseError as ClientResponseError,
    ClientSession as ClientSession,
    ClientSSLError as ClientSSLError,
    ClientTimeout as ClientTimeout,
    ClientWebSocketResponse as ClientWebSocketResponse,
    ContentTypeError as ContentTypeError,
    Fingerprint as Fingerprint,
    InvalidURL as InvalidURL,
    NamedPipeConnector as NamedPipeConnector,
    RequestInfo as RequestInfo,
    ServerConnectionError as ServerConnectionError,
    ServerDisconnectedError as ServerDisconnectedError,
    ServerFingerprintMismatch as ServerFingerprintMismatch,
    ServerTimeoutError as ServerTimeoutError,
    TCPConnector as TCPConnector,
    TooManyRedirects as TooManyRedirects,
    UnixConnector as UnixConnector,
    WSServerHandshakeError as WSServerHandshakeError,
    request as request,
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy
from .http import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    WebSocketError as WebSocketError,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
)
from .multipart import (
    BadContentDispositionHeader as BadContentDispositionHeader,
    BadContentDispositionParam as BadContentDispositionParam,
    BodyPartReader as BodyPartReader,
    MultipartReader as MultipartReader,
    MultipartWriter as MultipartWriter,
    content_disposition_filename as content_disposition_filename,
    parse_content_disposition as parse_content_disposition,
)
from .payload import (
    PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
    AsyncIterablePayload as AsyncIterablePayload,
    BufferedReaderPayload as BufferedReaderPayload,
    BytesIOPayload as BytesIOPayload,
    BytesPayload as BytesPayload,
    IOBasePayload as IOBasePayload,
    JsonPayload as JsonPayload,
    Payload as Payload,
    StringIOPayload as StringIOPayload,
    StringPayload as StringPayload,
    TextIOPayload as TextIOPayload,
    get_payload as get_payload,
    payload_type as payload_type,
)
from .payload_streamer import streamer as streamer
from .resolver import (
    AsyncResolver as AsyncResolver,
    DefaultResolver as DefaultResolver,
    ThreadedResolver as ThreadedResolver,
)
from .signals import Signal as Signal
from .streams import (
    EMPTY_PAYLOAD as EMPTY_PAYLOAD,
    DataQueue as DataQueue,
    EofStream as EofStream,
    FlowControlDataQueue as FlowControlDataQueue,
    StreamReader as StreamReader,
)
from .tracing import (
    TraceConfig as TraceConfig,
    TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
    TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
    TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
    TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
    TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
    TraceDnsCacheHitParams as TraceDnsCacheHitParams,
    TraceDnsCacheMissParams as TraceDnsCacheMissParams,
    TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
    TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
    TraceRequestChunkSentParams as TraceRequestChunkSentParams,
    TraceRequestEndParams as TraceRequestEndParams,
    TraceRequestExceptionParams as TraceRequestExceptionParams,
    TraceRequestRedirectParams as TraceRequestRedirectParams,
    TraceRequestStartParams as TraceRequestStartParams,
    TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)

__all__: Tuple[str, ...] = (
    "hdrs",
    # client
    "BaseConnector",
    "ClientConnectionError",
    "ClientConnectorCertificateError",
    "ClientConnectorError",
    "ClientConnectorSSLError",
    "ClientError",
    "ClientHttpProxyError",
    "ClientOSError",
    "ClientPayloadError",
    "ClientProxyConnectionError",
    "ClientResponse",
    "ClientRequest",
    "ClientResponseError",
    "ClientSSLError",
    "ClientSession",
    "ClientTimeout",
    "ClientWebSocketResponse",
    "ContentTypeError",
    "Fingerprint",
    "InvalidURL",
    "RequestInfo",
    "ServerConnectionError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ServerTimeoutError",
    "TCPConnector",
    "TooManyRedirects",
    "UnixConnector",
    "NamedPipeConnector",
    "WSServerHandshakeError",
    "request",
    # cookiejar
    "CookieJar",
    "DummyCookieJar",
    # formdata
    "FormData",
    # helpers
    "BasicAuth",
    "ChainMapProxy",
    # http
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    "WSMsgType",
    "WSCloseCode",
    "WSMessage",
    "WebSocketError",
    # multipart
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "BodyPartReader",
    "MultipartReader",
    "MultipartWriter",
    "content_disposition_filename",
    "parse_content_disposition",
    # payload
    "AsyncIterablePayload",
    "BufferedReaderPayload",
    "BytesIOPayload",
    "BytesPayload",
    "IOBasePayload",
    "JsonPayload",
    "PAYLOAD_REGISTRY",
    "Payload",
    "StringIOPayload",
    "StringPayload",
    "TextIOPayload",
    "get_payload",
    "payload_type",
    # payload_streamer
    "streamer",
    # resolver
    "AsyncResolver",
    "DefaultResolver",
    "ThreadedResolver",
    # signals
    "Signal",
    "DataQueue",
    "EMPTY_PAYLOAD",
    "EofStream",
    "FlowControlDataQueue",
    "StreamReader",
    # tracing
    "TraceConfig",
    "TraceConnectionCreateEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsResolveHostStartParams",
    "TraceRequestChunkSentParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceRequestRedirectParams",
    "TraceRequestStartParams",
    "TraceResponseChunkReceivedParams",
)

try:
    from .worker import GunicornUVLoopWebWorker, GunicornWebWorker

    __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError:  # pragma: no cover
    pass
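
Everything re-exported above is reachable from the top-level aiohttp namespace. A minimal client sketch against this 3.7.4.post0 build (not part of the commit; the URL is a placeholder):

import asyncio

import aiohttp

async def main() -> None:
    # ClientTimeout and ClientSession are the re-exports listed above
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get("https://example.com/") as resp:  # placeholder URL
            print(resp.status, len(await resp.text()))

asyncio.run(main())
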
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/__init__.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/abc.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/base_protocol.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/client.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/client_proto.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/client_ws.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/connector.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/cookiejar.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/formdata.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/frozenlist.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/hdrs.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/helpers.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/http.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/http_parser.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/http_websocket.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/http_writer.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/locks.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/log.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/multipart.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/payload.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/payload_streamer.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/resolver.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/signals.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/streams.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/tracing.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/typedefs.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/aiohttp/__pycache__/worker.cpython-310.opt-1.pyc
vendored
Normal file
Binary file not shown.
140
dist/ba_data/python-site-packages/aiohttp/_cparser.pxd
vendored
Normal file
@@ -0,0 +1,140 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t


cdef extern from "../vendor/http-parser/http_parser.h":
    ctypedef int (*http_data_cb) (http_parser*,
                                  const char *at,
                                  size_t length) except -1

    ctypedef int (*http_cb) (http_parser*) except -1

    struct http_parser:
        unsigned int type
        unsigned int flags
        unsigned int state
        unsigned int header_state
        unsigned int index

        uint32_t nread
        uint64_t content_length

        unsigned short http_major
        unsigned short http_minor
        unsigned int status_code
        unsigned int method
        unsigned int http_errno

        unsigned int upgrade

        void *data

    struct http_parser_settings:
        http_cb on_message_begin
        http_data_cb on_url
        http_data_cb on_status
        http_data_cb on_header_field
        http_data_cb on_header_value
        http_cb on_headers_complete
        http_data_cb on_body
        http_cb on_message_complete
        http_cb on_chunk_header
        http_cb on_chunk_complete

    enum http_parser_type:
        HTTP_REQUEST,
        HTTP_RESPONSE,
        HTTP_BOTH

    enum http_errno:
        HPE_OK,
        HPE_CB_message_begin,
        HPE_CB_url,
        HPE_CB_header_field,
        HPE_CB_header_value,
        HPE_CB_headers_complete,
        HPE_CB_body,
        HPE_CB_message_complete,
        HPE_CB_status,
        HPE_CB_chunk_header,
        HPE_CB_chunk_complete,
        HPE_INVALID_EOF_STATE,
        HPE_HEADER_OVERFLOW,
        HPE_CLOSED_CONNECTION,
        HPE_INVALID_VERSION,
        HPE_INVALID_STATUS,
        HPE_INVALID_METHOD,
        HPE_INVALID_URL,
        HPE_INVALID_HOST,
        HPE_INVALID_PORT,
        HPE_INVALID_PATH,
        HPE_INVALID_QUERY_STRING,
        HPE_INVALID_FRAGMENT,
        HPE_LF_EXPECTED,
        HPE_INVALID_HEADER_TOKEN,
        HPE_INVALID_CONTENT_LENGTH,
        HPE_INVALID_CHUNK_SIZE,
        HPE_INVALID_CONSTANT,
        HPE_INVALID_INTERNAL_STATE,
        HPE_STRICT,
        HPE_PAUSED,
        HPE_UNKNOWN

    enum flags:
        F_CHUNKED,
        F_CONNECTION_KEEP_ALIVE,
        F_CONNECTION_CLOSE,
        F_CONNECTION_UPGRADE,
        F_TRAILING,
        F_UPGRADE,
        F_SKIPBODY,
        F_CONTENTLENGTH

    enum http_method:
        DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
        LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
        REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
        MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
        LINK, UNLINK

    void http_parser_init(http_parser *parser, http_parser_type type)

    size_t http_parser_execute(http_parser *parser,
                               const http_parser_settings *settings,
                               const char *data,
                               size_t len)

    int http_should_keep_alive(const http_parser *parser)

    void http_parser_settings_init(http_parser_settings *settings)

    const char *http_errno_name(http_errno err)
    const char *http_errno_description(http_errno err)
    const char *http_method_str(http_method m)

    # URL Parser

    enum http_parser_url_fields:
        UF_SCHEMA = 0,
        UF_HOST = 1,
        UF_PORT = 2,
        UF_PATH = 3,
        UF_QUERY = 4,
        UF_FRAGMENT = 5,
        UF_USERINFO = 6,
        UF_MAX = 7

    struct http_parser_url_field_data:
        uint16_t off
        uint16_t len

    struct http_parser_url:
        uint16_t field_set
        uint16_t port
        http_parser_url_field_data[<int>UF_MAX] field_data

    void http_parser_url_init(http_parser_url *u)

    int http_parser_parse_url(const char *buf,
                              size_t buflen,
                              int is_connect,
                              http_parser_url *u)
9870
dist/ba_data/python-site-packages/aiohttp/_find_header.c
vendored
Normal file
File diff suppressed because it is too large
14
dist/ba_data/python-site-packages/aiohttp/_find_header.h
vendored
Normal file
@@ -0,0 +1,14 @@
#ifndef _FIND_HEADERS_H
#define _FIND_HEADERS_H

#ifdef __cplusplus
extern "C" {
#endif

int find_header(const char *str, int size);


#ifdef __cplusplus
}
#endif
#endif
2
dist/ba_data/python-site-packages/aiohttp/_find_header.pxd
vendored
Normal file
@@ -0,0 +1,2 @@
cdef extern from "_find_header.h":
    int find_header(char *, int)
7512
dist/ba_data/python-site-packages/aiohttp/_frozenlist.c
vendored
Normal file
File diff suppressed because it is too large
BIN
dist/ba_data/python-site-packages/aiohttp/_frozenlist.cp39-win_amd64.pyd
vendored
Normal file
Binary file not shown.
108
dist/ba_data/python-site-packages/aiohttp/_frozenlist.pyx
vendored
Normal file
@@ -0,0 +1,108 @@
from collections.abc import MutableSequence


cdef class FrozenList:

    cdef readonly bint frozen
    cdef list _items

    def __init__(self, items=None):
        self.frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    cdef object _check_frozen(self):
        if self.frozen:
            raise RuntimeError("Cannot modify frozen list.")

    cdef inline object _fast_len(self):
        return len(self._items)

    def freeze(self):
        self.frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._check_frozen()
        self._items[index] = value

    def __delitem__(self, index):
        self._check_frozen()
        del self._items[index]

    def __len__(self):
        return self._fast_len()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __richcmp__(self, other, op):
        if op == 0:  # <
            return list(self) < other
        if op == 1:  # <=
            return list(self) <= other
        if op == 2:  # ==
            return list(self) == other
        if op == 3:  # !=
            return list(self) != other
        if op == 4:  # >
            return list(self) > other
        if op == 5:  # >=
            return list(self) >= other

    def insert(self, pos, item):
        self._check_frozen()
        self._items.insert(pos, item)

    def __contains__(self, item):
        return item in self._items

    def __iadd__(self, items):
        self._check_frozen()
        self._items += list(items)
        return self

    def index(self, item):
        return self._items.index(item)

    def remove(self, item):
        self._check_frozen()
        self._items.remove(item)

    def clear(self):
        self._check_frozen()
        self._items.clear()

    def extend(self, items):
        self._check_frozen()
        self._items += list(items)

    def reverse(self):
        self._check_frozen()
        self._items.reverse()

    def pop(self, index=-1):
        self._check_frozen()
        return self._items.pop(index)

    def append(self, item):
        self._check_frozen()
        return self._items.append(item)

    def count(self, item):
        return self._items.count(item)

    def __repr__(self):
        return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
                                                      self._items)


MutableSequence.register(FrozenList)
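
The Cython class above is the fast path; aiohttp 3.7 also ships a matching pure-Python aiohttp.frozenlist.FrozenList with the same contract. A minimal sketch of that contract (not part of this commit): mutation is allowed until freeze(), after which mutating methods raise RuntimeError.

from aiohttp.frozenlist import FrozenList  # pure-Python fallback, same behavior

fl = FrozenList([1, 2])
fl.append(3)            # fine while not frozen
fl.freeze()
assert fl.frozen
try:
    fl.append(4)
except RuntimeError as exc:
    print(exc)          # Cannot modify frozen list.
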
83
dist/ba_data/python-site-packages/aiohttp/_headers.pxi
vendored
Normal file
@@ -0,0 +1,83 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changing.

from . import hdrs
cdef tuple headers = (
    hdrs.ACCEPT,
    hdrs.ACCEPT_CHARSET,
    hdrs.ACCEPT_ENCODING,
    hdrs.ACCEPT_LANGUAGE,
    hdrs.ACCEPT_RANGES,
    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
    hdrs.ACCESS_CONTROL_MAX_AGE,
    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
    hdrs.AGE,
    hdrs.ALLOW,
    hdrs.AUTHORIZATION,
    hdrs.CACHE_CONTROL,
    hdrs.CONNECTION,
    hdrs.CONTENT_DISPOSITION,
    hdrs.CONTENT_ENCODING,
    hdrs.CONTENT_LANGUAGE,
    hdrs.CONTENT_LENGTH,
    hdrs.CONTENT_LOCATION,
    hdrs.CONTENT_MD5,
    hdrs.CONTENT_RANGE,
    hdrs.CONTENT_TRANSFER_ENCODING,
    hdrs.CONTENT_TYPE,
    hdrs.COOKIE,
    hdrs.DATE,
    hdrs.DESTINATION,
    hdrs.DIGEST,
    hdrs.ETAG,
    hdrs.EXPECT,
    hdrs.EXPIRES,
    hdrs.FORWARDED,
    hdrs.FROM,
    hdrs.HOST,
    hdrs.IF_MATCH,
    hdrs.IF_MODIFIED_SINCE,
    hdrs.IF_NONE_MATCH,
    hdrs.IF_RANGE,
    hdrs.IF_UNMODIFIED_SINCE,
    hdrs.KEEP_ALIVE,
    hdrs.LAST_EVENT_ID,
    hdrs.LAST_MODIFIED,
    hdrs.LINK,
    hdrs.LOCATION,
    hdrs.MAX_FORWARDS,
    hdrs.ORIGIN,
    hdrs.PRAGMA,
    hdrs.PROXY_AUTHENTICATE,
    hdrs.PROXY_AUTHORIZATION,
    hdrs.RANGE,
    hdrs.REFERER,
    hdrs.RETRY_AFTER,
    hdrs.SEC_WEBSOCKET_ACCEPT,
    hdrs.SEC_WEBSOCKET_EXTENSIONS,
    hdrs.SEC_WEBSOCKET_KEY,
    hdrs.SEC_WEBSOCKET_KEY1,
    hdrs.SEC_WEBSOCKET_PROTOCOL,
    hdrs.SEC_WEBSOCKET_VERSION,
    hdrs.SERVER,
    hdrs.SET_COOKIE,
    hdrs.TE,
    hdrs.TRAILER,
    hdrs.TRANSFER_ENCODING,
    hdrs.URI,
    hdrs.UPGRADE,
    hdrs.USER_AGENT,
    hdrs.VARY,
    hdrs.VIA,
    hdrs.WWW_AUTHENTICATE,
    hdrs.WANT_DIGEST,
    hdrs.WARNING,
    hdrs.X_FORWARDED_FOR,
    hdrs.X_FORWARDED_HOST,
    hdrs.X_FORWARDED_PROTO,
)
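
The tuple mirrors the header-name constants defined in aiohttp.hdrs, which the C parser uses to intern well-known header names. A quick illustration (not part of this commit):

from aiohttp import hdrs

print(hdrs.CONTENT_TYPE)   # Content-Type
print(hdrs.METH_GET)       # GET
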
5433
dist/ba_data/python-site-packages/aiohttp/_helpers.c
vendored
Normal file
File diff suppressed because it is too large
BIN
dist/ba_data/python-site-packages/aiohttp/_helpers.cp39-win_amd64.pyd
vendored
Normal file
Binary file not shown.
6
dist/ba_data/python-site-packages/aiohttp/_helpers.pyi
vendored
Normal file
@@ -0,0 +1,6 @@
from typing import Any

class reify:
    def __init__(self, wrapped: Any) -> None: ...
    def __get__(self, inst: Any, owner: Any) -> Any: ...
    def __set__(self, inst: Any, value: Any) -> None: ...
35
dist/ba_data/python-site-packages/aiohttp/_helpers.pyx
vendored
Normal file
@@ -0,0 +1,35 @@
cdef class reify:
    """Use as a class method decorator. It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.

    """

    cdef object wrapped
    cdef object name

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    @property
    def __doc__(self):
        return self.wrapped.__doc__

    def __get__(self, inst, owner):
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")
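
The docstring describes the contract: compute once, then serve the cached value. Note the cache lives in the host object's _cache dict (see inst._cache[self.name]), so the class using the descriptor must provide one, as aiohttp's own classes do. A minimal sketch via aiohttp.helpers.reify, which resolves to this Cython class when available (the Page class is illustrative only, not part of aiohttp):

from aiohttp.helpers import reify

class Page:
    def __init__(self, html: str) -> None:
        self._cache = {}        # reify stores computed values here
        self.html = html

    @reify
    def title(self) -> str:
        print("computing...")   # runs only on first access
        return self.html.split("<title>")[1].split("</title>")[0]

page = Page("<html><title>Hello</title></html>")
print(page.title)   # prints "computing..." then "Hello"
print(page.title)   # cached: prints just "Hello"
try:
    page.title = "x"
except AttributeError as exc:
    print(exc)      # reified property is read-only
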
24607
dist/ba_data/python-site-packages/aiohttp/_http_parser.c
vendored
Normal file
File diff suppressed because it is too large
BIN
dist/ba_data/python-site-packages/aiohttp/_http_parser.cp39-win_amd64.pyd
vendored
Normal file
Binary file not shown.
875
dist/ba_data/python-site-packages/aiohttp/_http_parser.pyx
vendored
Normal file
@@ -0,0 +1,875 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function

from cpython cimport (
    Py_buffer,
    PyBUF_SIMPLE,
    PyBuffer_Release,
    PyBytes_AsString,
    PyBytes_AsStringAndSize,
    PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy

from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL

from aiohttp import hdrs

from .http_exceptions import (
    BadHttpMessage,
    BadStatusLine,
    ContentLengthError,
    InvalidHeader,
    InvalidURLError,
    LineTooLong,
    PayloadEncodingError,
    TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .http_writer import (
    HttpVersion as _HttpVersion,
    HttpVersion10 as _HttpVersion10,
    HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader

cimport cython

from aiohttp cimport _cparser as cparser

include "_headers.pxi"

from aiohttp cimport _find_header

DEF DEFAULT_FREELIST_SIZE = 250

cdef extern from "Python.h":
    int PyByteArray_Resize(object, Py_ssize_t) except -1
    Py_ssize_t PyByteArray_Size(object) except -1
    char* PyByteArray_AsString(object)

__all__ = ('HttpRequestParser', 'HttpResponseParser',
           'RawRequestMessage', 'RawResponseMessage')

cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer


cdef inline object extend(object buf, const char* at, size_t length):
    cdef Py_ssize_t s
    cdef char* ptr
    s = PyByteArray_Size(buf)
    PyByteArray_Resize(buf, s + length)
    ptr = PyByteArray_AsString(buf)
    memcpy(ptr + s, at, length)


DEF METHODS_COUNT = 34;

cdef list _http_method = []

for i in range(METHODS_COUNT):
    _http_method.append(
        cparser.http_method_str(<cparser.http_method> i).decode('ascii'))


cdef inline str http_method_str(int i):
    if i < METHODS_COUNT:
        return <str>_http_method[i]
    else:
        return "<unknown>"

cdef inline object find_header(bytes raw_header):
    cdef Py_ssize_t size
    cdef char *buf
    cdef int idx
    PyBytes_AsStringAndSize(raw_header, &buf, &size)
    idx = _find_header.find_header(buf, size)
    if idx == -1:
        return raw_header.decode('utf-8', 'surrogateescape')
    return headers[idx]


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
    cdef readonly str method
    cdef readonly str path
    cdef readonly object version  # HttpVersion
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked
    cdef readonly object url  # yarl.URL

    def __init__(self, method, path, version, headers, raw_headers,
                 should_close, compression, upgrade, chunked, url):
        self.method = method
        self.path = path
        self.version = version
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked
        self.url = url

    def __repr__(self):
        info = []
        info.append(("method", self.method))
        info.append(("path", self.path))
        info.append(("version", self.version))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        info.append(("url", self.url))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawRequestMessage(' + sinfo + ')>'

    def _replace(self, **dct):
        cdef RawRequestMessage ret
        ret = _new_request_message(self.method,
                                   self.path,
                                   self.version,
                                   self.headers,
                                   self.raw_headers,
                                   self.should_close,
                                   self.compression,
                                   self.upgrade,
                                   self.chunked,
                                   self.url)
        if "method" in dct:
            ret.method = dct["method"]
        if "path" in dct:
            ret.path = dct["path"]
        if "version" in dct:
            ret.version = dct["version"]
        if "headers" in dct:
            ret.headers = dct["headers"]
        if "raw_headers" in dct:
            ret.raw_headers = dct["raw_headers"]
        if "should_close" in dct:
            ret.should_close = dct["should_close"]
        if "compression" in dct:
            ret.compression = dct["compression"]
        if "upgrade" in dct:
            ret.upgrade = dct["upgrade"]
        if "chunked" in dct:
            ret.chunked = dct["chunked"]
        if "url" in dct:
            ret.url = dct["url"]
        return ret

cdef _new_request_message(str method,
                          str path,
                          object version,
                          object headers,
                          object raw_headers,
                          bint should_close,
                          object compression,
                          bint upgrade,
                          bint chunked,
                          object url):
    cdef RawRequestMessage ret
    ret = RawRequestMessage.__new__(RawRequestMessage)
    ret.method = method
    ret.path = path
    ret.version = version
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    ret.url = url
    return ret


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    cdef readonly object version  # HttpVersion
    cdef readonly int code
    cdef readonly str reason
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        info = []
        info.append(("version", self.version))
        info.append(("code", self.code))
        info.append(("reason", self.reason))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawResponseMessage(' + sinfo + ')>'


cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret


@cython.internal
cdef class HttpParser:

    cdef:
        cparser.http_parser* _cparser
        cparser.http_parser_settings* _csettings

        bytearray _raw_name
        bytearray _raw_value
        bint _has_value

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started
        object _url
        bytearray _buf
        str _path
        str _reason
        object _headers
        list _raw_headers
        bint _upgraded
        list _messages
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error
        bint _auto_decompress
        int _limit

        str _content_encoding

        Py_buffer py_buf

    def __cinit__(self):
        self._cparser = <cparser.http_parser*> \
            PyMem_Malloc(sizeof(cparser.http_parser))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.http_parser_settings*> \
            PyMem_Malloc(sizeof(cparser.http_parser_settings))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(self, cparser.http_parser_type mode,
               object protocol, object loop, int limit,
               object timer=None,
               size_t max_line_size=8190, size_t max_headers=32768,
               size_t max_field_size=8190, payload_exception=None,
               bint response_with_body=True, bint read_until_eof=False,
               bint auto_decompress=True):
        cparser.http_parser_init(self._cparser, mode)
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        cparser.http_parser_settings_init(self._csettings)

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            self._process_header()

        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.http_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == 5:  # cparser.CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or
                self._cparser.method == 5 or  # CONNECT: 5
                (self._cparser.status_code >= 199 and
                 self._cparser.content_length == ULLONG_MAX and
                 self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        pass

    cdef inline http_version(self):
        cdef cparser.http_parser* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENTLENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif self._cparser.http_errno != cparser.HPE_OK:
                desc = cparser.http_errno_description(
                    <cparser.http_errno> self._cparser.http_errno)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        cdef:
            size_t data_len
            size_t nb

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        nb = cparser.http_parser_execute(
            self._cparser,
            self._csettings,
            <char*>self.py_buf.buf,
            data_len)

        PyBuffer_Release(&self.py_buf)

        if (self._cparser.http_errno != cparser.HPE_OK):
            if self._payload_error == 0:
                if self._last_error is not None:
                    ex = self._last_error
                    self._last_error = None
                else:
                    ex = parser_error_from_errno(
                        <cparser.http_errno> self._cparser.http_errno)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val


cdef class HttpRequestParser(HttpParser):

    def __init__(self, protocol, loop, int limit, timer=None,
                 size_t max_line_size=8190, size_t max_headers=32768,
                 size_t max_field_size=8190, payload_exception=None,
                 bint response_with_body=True, bint read_until_eof=False,
                 ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof)

    cdef object _on_status_complete(self):
        cdef Py_buffer py_buf
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        if self._cparser.method == 5:  # CONNECT
            self._url = URL(self._path)
        else:
            PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
            try:
                self._url = _parse_url(<char*>py_buf.buf,
                                       py_buf.len)
            finally:
                PyBuffer_Release(&py_buf)
        PyByteArray_Resize(self._buf, 0)


cdef class HttpResponseParser(HttpParser):

    def __init__(self, protocol, loop, int limit, timer=None,
                 size_t max_line_size=8190, size_t max_headers=32768,
                 size_t max_field_size=8190, payload_exception=None,
                 bint response_with_body=True, bint read_until_eof=False,
                 bint auto_decompress=True
                 ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''

cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data

    pyparser._started = True
    pyparser._headers = CIMultiDict()
    pyparser._raw_headers = []
    PyByteArray_Resize(pyparser._buf, 0)
    pyparser._path = None
    pyparser._reason = None
    return 0


cdef int cb_on_url(cparser.http_parser* parser,
                   const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_status(cparser.http_parser* parser,
                      const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef str reason
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_field(cparser.http_parser* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        pyparser._on_status_complete()
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_value(cparser.http_parser* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        if pyparser._cparser.upgrade or pyparser._cparser.method == 5:  # CONNECT
            return 2
        else:
            return 0


cdef int cb_on_body(cparser.http_parser* parser,
                    const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as exc:
        if pyparser._payload_exception is not None:
            pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
        else:
            pyparser._payload.set_exception(exc)
        pyparser._payload_error = 1
        return -1
    else:
        return 0


cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._started = False
        pyparser._on_message_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_header()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef parser_error_from_errno(cparser.http_errno errno):
    cdef bytes desc = cparser.http_errno_description(errno)

    if errno in (cparser.HPE_CB_message_begin,
                 cparser.HPE_CB_url,
                 cparser.HPE_CB_header_field,
                 cparser.HPE_CB_header_value,
                 cparser.HPE_CB_headers_complete,
                 cparser.HPE_CB_body,
                 cparser.HPE_CB_message_complete,
                 cparser.HPE_CB_status,
                 cparser.HPE_CB_chunk_header,
|
||||
cparser.HPE_CB_chunk_complete):
|
||||
cls = BadHttpMessage
|
||||
|
||||
elif errno == cparser.HPE_INVALID_STATUS:
|
||||
cls = BadStatusLine
|
||||
|
||||
elif errno == cparser.HPE_INVALID_METHOD:
|
||||
cls = BadStatusLine
|
||||
|
||||
elif errno == cparser.HPE_INVALID_URL:
|
||||
cls = InvalidURLError
|
||||
|
||||
else:
|
||||
cls = BadHttpMessage
|
||||
|
||||
return cls(desc.decode('latin-1'))
|
||||
|
||||
|
||||
def parse_url(url):
|
||||
cdef:
|
||||
Py_buffer py_buf
|
||||
char* buf_data
|
||||
|
||||
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
|
||||
try:
|
||||
buf_data = <char*>py_buf.buf
|
||||
return _parse_url(buf_data, py_buf.len)
|
||||
finally:
|
||||
PyBuffer_Release(&py_buf)
|
||||
|
||||
|
||||
cdef _parse_url(char* buf_data, size_t length):
|
||||
cdef:
|
||||
cparser.http_parser_url* parsed
|
||||
int res
|
||||
str schema = None
|
||||
str host = None
|
||||
object port = None
|
||||
str path = None
|
||||
str query = None
|
||||
str fragment = None
|
||||
str user = None
|
||||
str password = None
|
||||
str userinfo = None
|
||||
object result = None
|
||||
int off
|
||||
int ln
|
||||
|
||||
parsed = <cparser.http_parser_url*> \
|
||||
PyMem_Malloc(sizeof(cparser.http_parser_url))
|
||||
if parsed is NULL:
|
||||
raise MemoryError()
|
||||
cparser.http_parser_url_init(parsed)
|
||||
try:
|
||||
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
|
||||
|
||||
if res == 0:
|
||||
if parsed.field_set & (1 << cparser.UF_SCHEMA):
|
||||
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
|
||||
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
|
||||
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||
else:
|
||||
schema = ''
|
||||
|
||||
if parsed.field_set & (1 << cparser.UF_HOST):
|
||||
off = parsed.field_data[<int>cparser.UF_HOST].off
|
||||
ln = parsed.field_data[<int>cparser.UF_HOST].len
|
||||
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||
else:
|
||||
host = ''
|
||||
|
||||
if parsed.field_set & (1 << cparser.UF_PORT):
|
||||
port = parsed.port
|
||||
|
||||
if parsed.field_set & (1 << cparser.UF_PATH):
|
||||
off = parsed.field_data[<int>cparser.UF_PATH].off
|
||||
ln = parsed.field_data[<int>cparser.UF_PATH].len
|
||||
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||
else:
|
||||
path = ''
|
||||
|
||||
if parsed.field_set & (1 << cparser.UF_QUERY):
|
||||
off = parsed.field_data[<int>cparser.UF_QUERY].off
|
||||
ln = parsed.field_data[<int>cparser.UF_QUERY].len
|
||||
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||
else:
|
||||
query = ''
|
||||
|
||||
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
|
||||
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
|
||||
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
|
||||
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||
else:
|
||||
fragment = ''
|
||||
|
||||
if parsed.field_set & (1 << cparser.UF_USERINFO):
|
||||
off = parsed.field_data[<int>cparser.UF_USERINFO].off
|
||||
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
|
||||
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
||||
|
||||
user, sep, password = userinfo.partition(':')
|
||||
|
||||
return URL_build(scheme=schema,
|
||||
user=user, password=password, host=host, port=port,
|
||||
path=path, query_string=query, fragment=fragment, encoded=True)
|
||||
else:
|
||||
raise InvalidURLError("invalid url {!r}".format(buf_data))
|
||||
finally:
|
||||
PyMem_Free(parsed)
|
||||
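The parse_url/_parse_url pair above is the C-accelerated splitter that feeds URL_build. A minimal pure-Python sketch of the equivalent result using yarl (which aiohttp already depends on); the helper name is illustrative, and encoded=True mirrors the URL_build(..., encoded=True) call above:

from yarl import URL

def parse_url_py(raw: bytes) -> URL:
    # yarl splits into scheme/user/password/host/port/path/query/fragment;
    # encoded=True skips re-quoting, matching the Cython path above.
    return URL(raw.decode("utf-8", "surrogateescape"), encoded=True)

print(parse_url_py(b"http://user:pass@example.com:8080/p?q=1#frag"))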
5840
dist/ba_data/python-site-packages/aiohttp/_http_writer.c
vendored
Normal file
File diff suppressed because it is too large
Load diff
BIN
dist/ba_data/python-site-packages/aiohttp/_http_writer.cp39-win_amd64.pyd
vendored
Normal file
Binary file not shown.
151
dist/ba_data/python-site-packages/aiohttp/_http_writer.pyx
vendored
Normal file
@@ -0,0 +1,151 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy

from multidict import istr

DEF BUF_SIZE = 16 * 1024  # 16KiB
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr


# ----------------- writer ---------------------------

cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)


cdef inline int _write_byte(Writer* writer, uint8_t ch):
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0


cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate pair, ignored
        return 0
    elif utf < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))


cdef inline int _write_str(Writer* writer, str s):
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1


# --------------- _serialize_headers ----------------------

cdef str to_str(object s):
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)


def _serialize_headers(str status_line, headers):
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
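_serialize_headers above grows a C buffer (spilling from the static 16 KiB BUFFER into malloc'd memory) and UTF-8-encodes the status line plus headers into one bytes blob. A rough pure-Python sketch of the same wire format (an illustrative helper, not part of the vendored file; unlike _write_utf8 it raises on lone surrogates instead of dropping them):

def serialize_headers_py(status_line: str, headers: dict) -> bytes:
    # "STATUS\r\nKey: value\r\n...\r\n\r\n" -- the format the C writer emits.
    lines = [status_line]
    lines.extend(f"{k}: {v}" for k, v in headers.items())
    return ("\r\n".join(lines) + "\r\n\r\n").encode("utf-8")

blob = serialize_headers_py(
    "GET / HTTP/1.1",
    {"Host": "example.com", "Connection": "keep-alive"},
)
assert blob.endswith(b"\r\n\r\n")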
3588
dist/ba_data/python-site-packages/aiohttp/_websocket.c
vendored
Normal file
File diff suppressed because it is too large
Load diff
BIN
dist/ba_data/python-site-packages/aiohttp/_websocket.cp39-win_amd64.pyd
vendored
Normal file
Binary file not shown.
56
dist/ba_data/python-site-packages/aiohttp/_websocket.pyx
vendored
Normal file
@@ -0,0 +1,56 @@
from cpython cimport PyBytes_AsString


# from cpython cimport PyByteArray_AsString  # Cython still does not export that
cdef extern from "Python.h":
    char* PyByteArray_AsString(bytearray ba) except NULL

from libc.stdint cimport uint32_t, uint64_t, uintmax_t


def _websocket_mask_cython(object mask, object data):
    """Note, this function mutates its `data` argument
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # is it needed in Python? malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8

    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
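The masking function above XORs the payload with the 4-byte key, 8 bytes at a time where the platform allows. A straightforward pure-Python rendering of the same operation (illustrative only; aiohttp ships its own fallback). XOR masking is an involution, so applying it twice restores the input:

def websocket_mask_py(mask: bytes, data: bytearray) -> None:
    # Mutates `data` in place, like _websocket_mask_cython above.
    assert len(mask) == 4
    for i in range(len(data)):
        data[i] ^= mask[i % 4]

buf = bytearray(b"hello")
websocket_mask_py(b"\x01\x02\x03\x04", buf)
websocket_mask_py(b"\x01\x02\x03\x04", buf)
assert buf == b"hello"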
200
dist/ba_data/python-site-packages/aiohttp/abc.py
vendored
Normal file
@@ -0,0 +1,200 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    Tuple,
)

from multidict import CIMultiDict
from yarl import URL

from .helpers import get_running_loop
from .typedefs import LooseCookies

if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application
    from .web_exceptions import HTTPException
    from .web_request import BaseRequest, Request
    from .web_response import StreamResponse
else:
    BaseRequest = Request = Application = StreamResponse = None
    HTTPException = None


class AbstractRouter(ABC):
    def __init__(self) -> None:
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for the sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""


class AbstractMatchInfo(ABC):
    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """


class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""


class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""


if TYPE_CHECKING:  # pragma: no cover
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self) -> None:
        """Clear all cookies."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""


class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    buffer_size = 0
    output_size = 0
    length = 0  # type: Optional[int]

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""


class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
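These ABCs are the extension points aiohttp exposes to applications. As a small hedged example, a custom access logger only needs to implement log(); the class name and log format below are illustrative, not part of this file:

import logging
from aiohttp.abc import AbstractAccessLogger

class PlainAccessLogger(AbstractAccessLogger):
    def log(self, request, response, time):
        # `time` is the request handling duration in seconds.
        self.logger.info(
            "%s %s -> %s in %.3fs",
            request.method, request.path, response.status, time,
        )

# Typically wired up via web.run_app(app, access_log_class=PlainAccessLogger).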
87
dist/ba_data/python-site-packages/aiohttp/base_protocol.py
vendored
Normal file
@@ -0,0 +1,87 @@
import asyncio
from typing import Optional, cast

from .tcp_helpers import tcp_nodelay


class BaseProtocol(asyncio.Protocol):
    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop  # type: asyncio.AbstractEventLoop
        self._paused = False
        self._drain_waiter = None  # type: Optional[asyncio.Future[None]]
        self._connection_lost = False
        self._reading_paused = False

        self.transport = None  # type: Optional[asyncio.Transport]

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._connection_lost = True
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self) -> None:
        if self._connection_lost:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        assert waiter is None or waiter.cancelled()
        waiter = self._loop.create_future()
        self._drain_waiter = waiter
        await waiter
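BaseProtocol implements asyncio's standard write flow-control pattern: pause_writing() raises a flag, _drain_helper() parks the writer on a future, and resume_writing() (or connection_lost()) completes it. A hedged sketch of how a writer built on it applies backpressure; `proto` (a connected BaseProtocol) and `chunks` are assumed to exist:

async def send_all(proto, chunks):
    for chunk in chunks:
        proto.transport.write(chunk)  # may trigger pause_writing()
        await proto._drain_helper()   # parks until resume_writing()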
1275
dist/ba_data/python-site-packages/aiohttp/client.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
317
dist/ba_data/python-site-packages/aiohttp/client_exceptions.py
vendored
Normal file
@@ -0,0 +1,317 @@
"""HTTP related errors."""
|
||||
|
||||
import asyncio
|
||||
import warnings
|
||||
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
|
||||
|
||||
from .typedefs import LooseHeaders
|
||||
|
||||
try:
|
||||
import ssl
|
||||
|
||||
SSLContext = ssl.SSLContext
|
||||
except ImportError: # pragma: no cover
|
||||
ssl = SSLContext = None # type: ignore
|
||||
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
||||
else:
|
||||
RequestInfo = ClientResponse = ConnectionKey = None
|
||||
|
||||
__all__ = (
|
||||
"ClientError",
|
||||
"ClientConnectionError",
|
||||
"ClientOSError",
|
||||
"ClientConnectorError",
|
||||
"ClientProxyConnectionError",
|
||||
"ClientSSLError",
|
||||
"ClientConnectorSSLError",
|
||||
"ClientConnectorCertificateError",
|
||||
"ServerConnectionError",
|
||||
"ServerTimeoutError",
|
||||
"ServerDisconnectedError",
|
||||
"ServerFingerprintMismatch",
|
||||
"ClientResponseError",
|
||||
"ClientHttpProxyError",
|
||||
"WSServerHandshakeError",
|
||||
"ContentTypeError",
|
||||
"ClientPayloadError",
|
||||
"InvalidURL",
|
||||
)
|
||||
|
||||
|
||||
class ClientError(Exception):
|
||||
"""Base class for client connection errors."""
|
||||
|
||||
|
||||
class ClientResponseError(ClientError):
|
||||
"""Connection error during reading response.
|
||||
|
||||
request_info: instance of RequestInfo
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
request_info: RequestInfo,
|
||||
history: Tuple[ClientResponse, ...],
|
||||
*,
|
||||
code: Optional[int] = None,
|
||||
status: Optional[int] = None,
|
||||
message: str = "",
|
||||
headers: Optional[LooseHeaders] = None,
|
||||
) -> None:
|
||||
self.request_info = request_info
|
||||
if code is not None:
|
||||
if status is not None:
|
||||
raise ValueError(
|
||||
"Both code and status arguments are provided; "
|
||||
"code is deprecated, use status instead"
|
||||
)
|
||||
warnings.warn(
|
||||
"code argument is deprecated, use status instead",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
if status is not None:
|
||||
self.status = status
|
||||
elif code is not None:
|
||||
self.status = code
|
||||
else:
|
||||
self.status = 0
|
||||
self.message = message
|
||||
self.headers = headers
|
||||
self.history = history
|
||||
self.args = (request_info, history)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "{}, message={!r}, url={!r}".format(
|
||||
self.status,
|
||||
self.message,
|
||||
self.request_info.real_url,
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
args = f"{self.request_info!r}, {self.history!r}"
|
||||
if self.status != 0:
|
||||
args += f", status={self.status!r}"
|
||||
if self.message != "":
|
||||
args += f", message={self.message!r}"
|
||||
if self.headers is not None:
|
||||
args += f", headers={self.headers!r}"
|
||||
return "{}({})".format(type(self).__name__, args)
|
||||
|
||||
@property
|
||||
def code(self) -> int:
|
||||
warnings.warn(
|
||||
"code property is deprecated, use status instead",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return self.status
|
||||
|
||||
@code.setter
|
||||
def code(self, value: int) -> None:
|
||||
warnings.warn(
|
||||
"code property is deprecated, use status instead",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
self.status = value
|
||||
|
||||
|
||||
class ContentTypeError(ClientResponseError):
|
||||
"""ContentType found is not valid."""
|
||||
|
||||
|
||||
class WSServerHandshakeError(ClientResponseError):
|
||||
"""websocket server handshake error."""
|
||||
|
||||
|
||||
class ClientHttpProxyError(ClientResponseError):
|
||||
"""HTTP proxy error.
|
||||
|
||||
Raised in :class:`aiohttp.connector.TCPConnector` if
|
||||
proxy responds with status other than ``200 OK``
|
||||
on ``CONNECT`` request.
|
||||
"""
|
||||
|
||||
|
||||
class TooManyRedirects(ClientResponseError):
|
||||
"""Client was redirected too many times."""
|
||||
|
||||
|
||||
class ClientConnectionError(ClientError):
|
||||
"""Base class for client socket errors."""
|
||||
|
||||
|
||||
class ClientOSError(ClientConnectionError, OSError):
|
||||
"""OSError error."""
|
||||
|
||||
|
||||
class ClientConnectorError(ClientOSError):
|
||||
"""Client connector error.
|
||||
|
||||
Raised in :class:`aiohttp.connector.TCPConnector` if
|
||||
connection to proxy can not be established.
|
||||
"""
|
||||
|
||||
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
|
||||
self._conn_key = connection_key
|
||||
self._os_error = os_error
|
||||
super().__init__(os_error.errno, os_error.strerror)
|
||||
self.args = (connection_key, os_error)
|
||||
|
||||
@property
|
||||
def os_error(self) -> OSError:
|
||||
return self._os_error
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
return self._conn_key.host
|
||||
|
||||
@property
|
||||
def port(self) -> Optional[int]:
|
||||
return self._conn_key.port
|
||||
|
||||
@property
|
||||
def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
|
||||
return self._conn_key.ssl
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
|
||||
self, self.ssl if self.ssl is not None else "default", self.strerror
|
||||
)
|
||||
|
||||
# OSError.__reduce__ does too much black magick
|
||||
__reduce__ = BaseException.__reduce__
|
||||
|
||||
|
||||
class ClientProxyConnectionError(ClientConnectorError):
|
||||
"""Proxy connection error.
|
||||
|
||||
Raised in :class:`aiohttp.connector.TCPConnector` if
|
||||
connection to proxy can not be established.
|
||||
"""
|
||||
|
||||
|
||||
class ServerConnectionError(ClientConnectionError):
|
||||
"""Server connection errors."""
|
||||
|
||||
|
||||
class ServerDisconnectedError(ServerConnectionError):
|
||||
"""Server disconnected."""
|
||||
|
||||
def __init__(self, message: Optional[str] = None) -> None:
|
||||
if message is None:
|
||||
message = "Server disconnected"
|
||||
|
||||
self.args = (message,)
|
||||
self.message = message
|
||||
|
||||
|
||||
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
|
||||
"""Server timeout error."""
|
||||
|
||||
|
||||
class ServerFingerprintMismatch(ServerConnectionError):
|
||||
"""SSL certificate does not match expected fingerprint."""
|
||||
|
||||
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
|
||||
self.expected = expected
|
||||
self.got = got
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.args = (expected, got, host, port)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
|
||||
self.__class__.__name__, self.expected, self.got, self.host, self.port
|
||||
)
|
||||
|
||||
|
||||
class ClientPayloadError(ClientError):
|
||||
"""Response payload error."""
|
||||
|
||||
|
||||
class InvalidURL(ClientError, ValueError):
|
||||
"""Invalid URL.
|
||||
|
||||
URL used for fetching is malformed, e.g. it doesn't contains host
|
||||
part."""
|
||||
|
||||
# Derive from ValueError for backward compatibility
|
||||
|
||||
def __init__(self, url: Any) -> None:
|
||||
# The type of url is not yarl.URL because the exception can be raised
|
||||
# on URL(url) call
|
||||
super().__init__(url)
|
||||
|
||||
@property
|
||||
def url(self) -> Any:
|
||||
return self.args[0]
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__} {self.url}>"
|
||||
|
||||
|
||||
class ClientSSLError(ClientConnectorError):
|
||||
"""Base error for ssl.*Errors."""
|
||||
|
||||
|
||||
if ssl is not None:
|
||||
cert_errors = (ssl.CertificateError,)
|
||||
cert_errors_bases = (
|
||||
ClientSSLError,
|
||||
ssl.CertificateError,
|
||||
)
|
||||
|
||||
ssl_errors = (ssl.SSLError,)
|
||||
ssl_error_bases = (ClientSSLError, ssl.SSLError)
|
||||
else: # pragma: no cover
|
||||
cert_errors = tuple()
|
||||
cert_errors_bases = (
|
||||
ClientSSLError,
|
||||
ValueError,
|
||||
)
|
||||
|
||||
ssl_errors = tuple()
|
||||
ssl_error_bases = (ClientSSLError,)
|
||||
|
||||
|
||||
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
|
||||
"""Response ssl error."""
|
||||
|
||||
|
||||
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
|
||||
"""Response certificate error."""
|
||||
|
||||
def __init__(
|
||||
self, connection_key: ConnectionKey, certificate_error: Exception
|
||||
) -> None:
|
||||
self._conn_key = connection_key
|
||||
self._certificate_error = certificate_error
|
||||
self.args = (connection_key, certificate_error)
|
||||
|
||||
@property
|
||||
def certificate_error(self) -> Exception:
|
||||
return self._certificate_error
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
return self._conn_key.host
|
||||
|
||||
@property
|
||||
def port(self) -> Optional[int]:
|
||||
return self._conn_key.port
|
||||
|
||||
@property
|
||||
def ssl(self) -> bool:
|
||||
return self._conn_key.is_ssl
|
||||
|
||||
def __str__(self) -> str:
|
||||
return (
|
||||
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
|
||||
"[{0.certificate_error.__class__.__name__}: "
|
||||
"{0.certificate_error.args}]".format(self)
|
||||
)
|
||||
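Illustrative only: how calling code is expected to use this hierarchy. The session and URL are assumptions; every exception name below is exported in the __all__ tuple above:

import aiohttp

async def fetch(session: aiohttp.ClientSession, url: str) -> str:
    try:
        async with session.get(url, raise_for_status=True) as resp:
            return await resp.text()
    except aiohttp.ClientResponseError as exc:   # bad HTTP status
        raise RuntimeError(f"{exc.status} from {exc.request_info.real_url}")
    except aiohttp.ClientConnectorError as exc:  # TCP/SSL connect failure
        raise RuntimeError(f"cannot reach {exc.host}:{exc.port}")
    except aiohttp.ClientError:                  # catch-all base class
        raise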
251
dist/ba_data/python-site-packages/aiohttp/client_proto.py
vendored
Normal file
@@ -0,0 +1,251 @@
import asyncio
from contextlib import suppress
from typing import Any, Optional, Tuple

from .base_protocol import BaseProtocol
from .client_exceptions import (
    ClientOSError,
    ClientPayloadError,
    ServerDisconnectedError,
    ServerTimeoutError,
)
from .helpers import BaseTimerContext
from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader


class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        self._should_close = False

        self._payload = None
        self._skip_payload = False
        self._payload_parser = None

        self._timer = None

        self._tail = b""
        self._upgraded = False
        self._parser = None  # type: Optional[HttpResponseParser]

        self._read_timeout = None  # type: Optional[float]
        self._read_timeout_handle = None  # type: Optional[asyncio.TimerHandle]

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        if self._payload is not None and not self._payload.is_eof() or self._upgraded:
            return True

        return (
            self._should_close
            or self._upgraded
            or self.exception() is not None
            or self._payload_parser is not None
            or len(self) > 0
            or bool(self._tail)
        )

    def force_close(self) -> None:
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        if self._payload_parser is not None:
            with suppress(Exception):
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception:
                if self._payload is not None:
                    self._payload.set_exception(
                        ClientPayloadError("Response payload is not completed")
                    )

        if not self.is_eof():
            if isinstance(exc, OSError):
                exc = ClientOSError(*exc.args)
            if exc is None:
                exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            self.set_exception(exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(self, exc: BaseException) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2 ** 16
    ) -> None:
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout
        self._reschedule_timeout()

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
        )

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def _on_read_timeout(self) -> None:
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            self._payload.set_exception(exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(exc)
                    return

                self._upgraded = upgraded

                payload = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    if self._skip_payload or message.code in (204, 304):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)  # type: ignore
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) were processed:
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        self.data_received(tail)
                    else:
                        self._tail = tail
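data_received() above leans on one contract: the parser's feed_data() returns (messages, upgraded, tail), and any tail bytes past the last complete message are either replayed (after a protocol upgrade) or buffered for the next call. A runnable toy model of just that buffering contract, not the real parser:

def toy_feed_data(buffer: bytes, data: bytes):
    # Split complete "messages" on a blank line; keep the remainder as tail.
    buffer += data
    head, sep, tail = buffer.rpartition(b"\r\n\r\n")
    if not sep:
        return [], buffer  # nothing complete yet: keep everything buffered
    return head.split(b"\r\n\r\n"), tail

msgs, tail = toy_feed_data(b"", b"HTTP/1.1 200 OK\r\n\r\nHTT")
assert len(msgs) == 1 and tail == b"HTT"  # tail is fed back on the next call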
1127
dist/ba_data/python-site-packages/aiohttp/client_reqrep.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
301
dist/ba_data/python-site-packages/aiohttp/client_ws.py
vendored
Normal file
@@ -0,0 +1,301 @@
"""WebSocket client for asyncio."""
|
||||
|
||||
import asyncio
|
||||
from typing import Any, Optional
|
||||
|
||||
import async_timeout
|
||||
|
||||
from .client_exceptions import ClientError
|
||||
from .client_reqrep import ClientResponse
|
||||
from .helpers import call_later, set_result
|
||||
from .http import (
|
||||
WS_CLOSED_MESSAGE,
|
||||
WS_CLOSING_MESSAGE,
|
||||
WebSocketError,
|
||||
WSMessage,
|
||||
WSMsgType,
|
||||
)
|
||||
from .http_websocket import WebSocketWriter # WSMessage
|
||||
from .streams import EofStream, FlowControlDataQueue
|
||||
from .typedefs import (
|
||||
DEFAULT_JSON_DECODER,
|
||||
DEFAULT_JSON_ENCODER,
|
||||
JSONDecoder,
|
||||
JSONEncoder,
|
||||
)
|
||||
|
||||
|
||||
class ClientWebSocketResponse:
|
||||
def __init__(
|
||||
self,
|
||||
reader: "FlowControlDataQueue[WSMessage]",
|
||||
writer: WebSocketWriter,
|
||||
protocol: Optional[str],
|
||||
response: ClientResponse,
|
||||
timeout: float,
|
||||
autoclose: bool,
|
||||
autoping: bool,
|
||||
loop: asyncio.AbstractEventLoop,
|
||||
*,
|
||||
receive_timeout: Optional[float] = None,
|
||||
heartbeat: Optional[float] = None,
|
||||
compress: int = 0,
|
||||
client_notakeover: bool = False,
|
||||
) -> None:
|
||||
self._response = response
|
||||
self._conn = response.connection
|
||||
|
||||
self._writer = writer
|
||||
self._reader = reader
|
||||
self._protocol = protocol
|
||||
self._closed = False
|
||||
self._closing = False
|
||||
self._close_code = None # type: Optional[int]
|
||||
self._timeout = timeout
|
||||
self._receive_timeout = receive_timeout
|
||||
self._autoclose = autoclose
|
||||
self._autoping = autoping
|
||||
self._heartbeat = heartbeat
|
||||
self._heartbeat_cb = None
|
||||
if heartbeat is not None:
|
||||
self._pong_heartbeat = heartbeat / 2.0
|
||||
self._pong_response_cb = None
|
||||
self._loop = loop
|
||||
self._waiting = None # type: Optional[asyncio.Future[bool]]
|
||||
self._exception = None # type: Optional[BaseException]
|
||||
self._compress = compress
|
||||
self._client_notakeover = client_notakeover
|
||||
|
||||
self._reset_heartbeat()
|
||||
|
||||
def _cancel_heartbeat(self) -> None:
|
||||
if self._pong_response_cb is not None:
|
||||
self._pong_response_cb.cancel()
|
||||
self._pong_response_cb = None
|
||||
|
||||
if self._heartbeat_cb is not None:
|
||||
self._heartbeat_cb.cancel()
|
||||
self._heartbeat_cb = None
|
||||
|
||||
def _reset_heartbeat(self) -> None:
|
||||
self._cancel_heartbeat()
|
||||
|
||||
if self._heartbeat is not None:
|
||||
self._heartbeat_cb = call_later(
|
||||
self._send_heartbeat, self._heartbeat, self._loop
|
||||
)
|
||||
|
||||
def _send_heartbeat(self) -> None:
|
||||
if self._heartbeat is not None and not self._closed:
|
||||
# fire-and-forget a task is not perfect but maybe ok for
|
||||
# sending ping. Otherwise we need a long-living heartbeat
|
||||
# task in the class.
|
||||
self._loop.create_task(self._writer.ping())
|
||||
|
||||
if self._pong_response_cb is not None:
|
||||
self._pong_response_cb.cancel()
|
||||
self._pong_response_cb = call_later(
|
||||
self._pong_not_received, self._pong_heartbeat, self._loop
|
||||
)
|
||||
|
||||
def _pong_not_received(self) -> None:
|
||||
if not self._closed:
|
||||
self._closed = True
|
||||
self._close_code = 1006
|
||||
self._exception = asyncio.TimeoutError()
|
||||
self._response.close()
|
||||
|
||||
@property
|
||||
def closed(self) -> bool:
|
||||
return self._closed
|
||||
|
||||
@property
|
||||
def close_code(self) -> Optional[int]:
|
||||
return self._close_code
|
||||
|
||||
@property
|
||||
def protocol(self) -> Optional[str]:
|
||||
return self._protocol
|
||||
|
||||
@property
|
||||
def compress(self) -> int:
|
||||
return self._compress
|
||||
|
||||
@property
|
||||
def client_notakeover(self) -> bool:
|
||||
return self._client_notakeover
|
||||
|
||||
def get_extra_info(self, name: str, default: Any = None) -> Any:
|
||||
"""extra info from connection transport"""
|
||||
conn = self._response.connection
|
||||
if conn is None:
|
||||
return default
|
||||
transport = conn.transport
|
||||
if transport is None:
|
||||
return default
|
||||
return transport.get_extra_info(name, default)
|
||||
|
||||
def exception(self) -> Optional[BaseException]:
|
||||
return self._exception
|
||||
|
||||
async def ping(self, message: bytes = b"") -> None:
|
||||
await self._writer.ping(message)
|
||||
|
||||
async def pong(self, message: bytes = b"") -> None:
|
||||
await self._writer.pong(message)
|
||||
|
||||
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
|
||||
if not isinstance(data, str):
|
||||
raise TypeError("data argument must be str (%r)" % type(data))
|
||||
await self._writer.send(data, binary=False, compress=compress)
|
||||
|
||||
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
|
||||
if not isinstance(data, (bytes, bytearray, memoryview)):
|
||||
raise TypeError("data argument must be byte-ish (%r)" % type(data))
|
||||
await self._writer.send(data, binary=True, compress=compress)
|
||||
|
||||
async def send_json(
|
||||
self,
|
||||
data: Any,
|
||||
compress: Optional[int] = None,
|
||||
*,
|
||||
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
|
||||
) -> None:
|
||||
await self.send_str(dumps(data), compress=compress)
|
||||
|
||||
async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
|
||||
# we need to break `receive()` cycle first,
|
||||
# `close()` may be called from different task
|
||||
if self._waiting is not None and not self._closed:
|
||||
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
|
||||
await self._waiting
|
||||
|
||||
if not self._closed:
|
||||
self._cancel_heartbeat()
|
||||
self._closed = True
|
||||
try:
|
||||
await self._writer.close(code, message)
|
||||
except asyncio.CancelledError:
|
||||
self._close_code = 1006
|
||||
self._response.close()
|
||||
raise
|
||||
except Exception as exc:
|
||||
self._close_code = 1006
|
||||
self._exception = exc
|
||||
self._response.close()
|
||||
return True
|
||||
|
||||
if self._closing:
|
||||
self._response.close()
|
||||
return True
|
||||
|
||||
while True:
|
||||
try:
|
||||
with async_timeout.timeout(self._timeout, loop=self._loop):
|
||||
msg = await self._reader.read()
|
||||
except asyncio.CancelledError:
|
||||
self._close_code = 1006
|
||||
self._response.close()
|
||||
raise
|
||||
except Exception as exc:
|
||||
self._close_code = 1006
|
||||
self._exception = exc
|
||||
self._response.close()
|
||||
return True
|
||||
|
||||
if msg.type == WSMsgType.CLOSE:
|
||||
self._close_code = msg.data
|
||||
self._response.close()
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
|
||||
while True:
|
||||
if self._waiting is not None:
|
||||
raise RuntimeError("Concurrent call to receive() is not allowed")
|
||||
|
||||
if self._closed:
|
||||
return WS_CLOSED_MESSAGE
|
||||
elif self._closing:
|
||||
await self.close()
|
||||
return WS_CLOSED_MESSAGE
|
||||
|
||||
try:
|
||||
self._waiting = self._loop.create_future()
|
||||
try:
|
||||
with async_timeout.timeout(
|
||||
timeout or self._receive_timeout, loop=self._loop
|
||||
):
|
||||
msg = await self._reader.read()
|
||||
self._reset_heartbeat()
|
||||
finally:
|
||||
waiter = self._waiting
|
||||
self._waiting = None
|
||||
set_result(waiter, True)
|
||||
except (asyncio.CancelledError, asyncio.TimeoutError):
|
||||
self._close_code = 1006
|
||||
raise
|
||||
except EofStream:
|
||||
self._close_code = 1000
|
||||
await self.close()
|
||||
return WSMessage(WSMsgType.CLOSED, None, None)
|
||||
except ClientError:
|
||||
self._closed = True
|
||||
self._close_code = 1006
|
||||
return WS_CLOSED_MESSAGE
|
||||
except WebSocketError as exc:
|
||||
self._close_code = exc.code
|
||||
await self.close(code=exc.code)
|
||||
return WSMessage(WSMsgType.ERROR, exc, None)
|
||||
except Exception as exc:
|
||||
self._exception = exc
|
||||
self._closing = True
|
||||
self._close_code = 1006
|
||||
await self.close()
|
||||
return WSMessage(WSMsgType.ERROR, exc, None)
|
||||
|
||||
if msg.type == WSMsgType.CLOSE:
|
||||
self._closing = True
|
||||
self._close_code = msg.data
|
||||
if not self._closed and self._autoclose:
|
||||
await self.close()
|
||||
elif msg.type == WSMsgType.CLOSING:
|
||||
self._closing = True
|
||||
elif msg.type == WSMsgType.PING and self._autoping:
|
||||
await self.pong(msg.data)
|
||||
continue
|
||||
elif msg.type == WSMsgType.PONG and self._autoping:
|
||||
continue
|
||||
|
||||
return msg
|
||||
|
||||
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
|
||||
msg = await self.receive(timeout)
|
||||
if msg.type != WSMsgType.TEXT:
|
||||
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
|
||||
return msg.data
|
||||
|
||||
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
|
||||
msg = await self.receive(timeout)
|
||||
if msg.type != WSMsgType.BINARY:
|
||||
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
|
||||
return msg.data
|
||||
|
||||
async def receive_json(
|
||||
self,
|
||||
*,
|
||||
loads: JSONDecoder = DEFAULT_JSON_DECODER,
|
||||
timeout: Optional[float] = None,
|
||||
) -> Any:
|
||||
data = await self.receive_str(timeout=timeout)
|
||||
return loads(data)
|
||||
|
||||
def __aiter__(self) -> "ClientWebSocketResponse":
|
||||
return self
|
||||
|
||||
async def __anext__(self) -> WSMessage:
|
||||
msg = await self.receive()
|
||||
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
|
||||
raise StopAsyncIteration
|
||||
return msg
|
||||
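Illustrative client usage of the class above (the URL is a placeholder): ws_connect returns a ClientWebSocketResponse, and the async for loop drives __anext__/receive() until a CLOSE/CLOSING/CLOSED message ends iteration:

import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect("wss://echo.example", heartbeat=30) as ws:
            await ws.send_str("ping")
            async for msg in ws:  # stops on CLOSE/CLOSING/CLOSED
                if msg.type == aiohttp.WSMsgType.TEXT:
                    print(msg.data)
                    await ws.close()

asyncio.run(main())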
1262
dist/ba_data/python-site-packages/aiohttp/connector.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
382
dist/ba_data/python-site-packages/aiohttp/cookiejar.py
vendored
Normal file
@@ -0,0 +1,382 @@
import asyncio
import datetime
import os  # noqa
import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import (  # noqa
    DefaultDict,
    Dict,
    Iterable,
    Iterator,
    Mapping,
    Optional,
    Set,
    Tuple,
    Union,
    cast,
)

from yarl import URL

from .abc import AbstractCookieJar
from .helpers import is_ip_address, next_whole_second
from .typedefs import LooseCookies, PathLike

__all__ = ("CookieJar", "DummyCookieJar")


CookieItem = Union[str, "Morsel[str]"]


class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)

    MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        loop: Optional[asyncio.AbstractEventLoop] = None
    ) -> None:
        super().__init__(loop=loop)
        self._cookies = defaultdict(
            SimpleCookie
        )  # type: DefaultDict[str, SimpleCookie[str]]
        self._host_only_cookies = set()  # type: Set[Tuple[str, str]]
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        self._next_expiration = next_whole_second()
        self._expirations = {}  # type: Dict[Tuple[str, str], datetime.datetime]
        # #4515: datetime.max may not be representable on 32-bit platforms
        self._max_time = self.MAX_TIME
        try:
            self._max_time.timestamp()
        except OverflowError:
            self._max_time = self.MAX_32BIT_TIME

    def save(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

    def clear(self) -> None:
        self._cookies.clear()
        self._host_only_cookies.clear()
        self._next_expiration = next_whole_second()
        self._expirations.clear()

    def __iter__(self) -> "Iterator[Morsel[str]]":
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        return sum(1 for i in self)

    def _do_expiration(self) -> None:
        now = datetime.datetime.now(datetime.timezone.utc)
        if self._next_expiration > now:
            return
        if not self._expirations:
            return
        next_expiration = self._max_time
        to_del = []
        cookies = self._cookies
        expirations = self._expirations
        for (domain, name), when in expirations.items():
            if when <= now:
                cookies[domain].pop(name, None)
                to_del.append((domain, name))
                self._host_only_cookies.discard((domain, name))
            else:
                next_expiration = min(next_expiration, when)
        for key in to_del:
            del expirations[key]

        try:
            self._next_expiration = next_expiration.replace(
                microsecond=0
            ) + datetime.timedelta(seconds=1)
        except OverflowError:
            self._next_expiration = self._max_time

    def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
        self._next_expiration = min(self._next_expiration, when)
        self._expirations[(domain, name)] = when

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                tmp = SimpleCookie()  # type: SimpleCookie[str]
                tmp[name] = cookie  # type: ignore
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            max_age = cookie["max-age"]
            if max_age:
                try:
                    delta_seconds = int(max_age)
                    try:
                        max_age_expiration = datetime.datetime.now(
                            datetime.timezone.utc
                        ) + datetime.timedelta(seconds=delta_seconds)
                    except OverflowError:
                        max_age_expiration = self._max_time
                    self._expire_cookie(max_age_expiration, domain, name)
                except ValueError:
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, name)
                    else:
                        cookie["expires"] = ""

            self._cookies[domain][name] = cookie

        self._do_expiration()

    def filter_cookies(
        self, request_url: URL = URL()
    ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
        """Returns this jar's cookies filtered by their attributes."""
        self._do_expiration()
        request_url = URL(request_url)
        filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        hostname = request_url.raw_host or ""
        is_not_secure = request_url.scheme not in ("https", "wss")

        for cookie in self:
            name = cookie.key
            domain = cookie["domain"]

            # Send shared cookies
            if not domain:
                filtered[name] = cookie.value
                continue

            if not self._unsafe and is_ip_address(hostname):
                continue

            if (domain, name) in self._host_only_cookies:
                if domain != hostname:
                    continue
            elif not self._is_domain_match(domain, hostname):
                continue

            if not self._is_path_match(request_url.path, cookie["path"]):
                continue

            if is_not_secure and cookie["secure"]:
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            filtered[name] = mrsl_val

        return filtered

    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @staticmethod
    def _is_path_match(req_path: str, cookie_path: str) -> bool:
        """Implements path matching adhering to RFC 6265."""
        if not req_path.startswith("/"):
            req_path = "/"

        if req_path == cookie_path:
            return True

        if not req_path.startswith(cookie_path):
            return False

        if cookie_path.endswith("/"):
            return True

        non_matching = req_path[len(cookie_path) :]
|
||||
|
||||
return non_matching.startswith("/")
|
||||
|
||||
@classmethod
|
||||
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
|
||||
"""Implements date string parsing adhering to RFC 6265."""
|
||||
if not date_str:
|
||||
return None
|
||||
|
||||
found_time = False
|
||||
found_day = False
|
||||
found_month = False
|
||||
found_year = False
|
||||
|
||||
hour = minute = second = 0
|
||||
day = 0
|
||||
month = 0
|
||||
year = 0
|
||||
|
||||
for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
|
||||
|
||||
token = token_match.group("token")
|
||||
|
||||
if not found_time:
|
||||
time_match = cls.DATE_HMS_TIME_RE.match(token)
|
||||
if time_match:
|
||||
found_time = True
|
||||
hour, minute, second = [int(s) for s in time_match.groups()]
|
||||
continue
|
||||
|
||||
if not found_day:
|
||||
day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
|
||||
if day_match:
|
||||
found_day = True
|
||||
day = int(day_match.group())
|
||||
continue
|
||||
|
||||
if not found_month:
|
||||
month_match = cls.DATE_MONTH_RE.match(token)
|
||||
if month_match:
|
||||
found_month = True
|
||||
assert month_match.lastindex is not None
|
||||
month = month_match.lastindex
|
||||
continue
|
||||
|
||||
if not found_year:
|
||||
year_match = cls.DATE_YEAR_RE.match(token)
|
||||
if year_match:
|
||||
found_year = True
|
||||
year = int(year_match.group())
|
||||
|
||||
if 70 <= year <= 99:
|
||||
year += 1900
|
||||
elif 0 <= year <= 69:
|
||||
year += 2000
|
||||
|
||||
if False in (found_day, found_month, found_year, found_time):
|
||||
return None
|
||||
|
||||
if not 1 <= day <= 31:
|
||||
return None
|
||||
|
||||
if year < 1601 or hour > 23 or minute > 59 or second > 59:
|
||||
return None
|
||||
|
||||
return datetime.datetime(
|
||||
year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
|
||||
)
|
||||
|
||||
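A quick check of the date parser above; a minimal sketch, not part of the vendored file, assuming the enclosing class is aiohttp's CookieJar:

# RFC 6265 date parsing tolerates several layouts; token order does not matter.
from aiohttp.cookiejar import CookieJar

dt = CookieJar._parse_date("Sun, 06 Nov 1994 08:49:37 GMT")
assert dt is not None
assert (dt.year, dt.month, dt.day, dt.hour) == (1994, 11, 6, 8)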

class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        while False:
            yield None

    def __len__(self) -> int:
        return 0

    def clear(self) -> None:
        pass

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        pass

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        return SimpleCookie()
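A minimal end-to-end sketch of the jar above; not part of the vendored file, assuming the enclosing class is aiohttp's CookieJar and that unsafe=True is passed so the IP-address host is accepted:

import asyncio
from http.cookies import SimpleCookie
from yarl import URL
from aiohttp import CookieJar

async def _demo() -> None:
    jar = CookieJar(unsafe=True)  # unsafe: allow cookies for IP hosts
    jar.update_cookies(SimpleCookie("token=abc; Path=/"), URL("http://127.0.0.1/"))
    sent = jar.filter_cookies(URL("http://127.0.0.1/api"))
    assert sent["token"].value == "abc"

asyncio.run(_demo())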
170
dist/ba_data/python-site-packages/aiohttp/formdata.py
vendored
Normal file
@ -0,0 +1,170 @@
import io
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode

from multidict import MultiDict, MultiDictProxy

from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload

__all__ = ("FormData",)


class FormData:
    """Helper class for multipart/form-data and
    application/x-www-form-urlencoded body generation."""

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        self._writer = multipart.MultipartWriter("form-data")
        self._fields = []  # type: List[Any]
        self._is_multipart = False
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None
    ) -> None:

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            if filename is None and content_transfer_encoding is None:
                filename = name

        type_options = MultiDict({"name": name})  # type: MultiDict[str]
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
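A short usage sketch for the class above (illustrative only): FormData switches from urlencoded to multipart as soon as a field needs a filename or an explicit content type.

form = FormData()
form.add_field("user", "alice")            # plain str stays urlencoded
assert not form.is_multipart
form.add_field(
    "avatar", b"\x89PNG...", filename="a.png", content_type="image/png"
)                                          # filename/content_type force multipart
assert form.is_multipart
writer = form()                            # a MultipartWriter in this case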
72
dist/ba_data/python-site-packages/aiohttp/frozenlist.py
vendored
Normal file
@ -0,0 +1,72 @@
from collections.abc import MutableSequence
from functools import total_ordering

from .helpers import NO_EXTENSIONS


@total_ordering
class FrozenList(MutableSequence):

    __slots__ = ("_frozen", "_items")

    def __init__(self, items=None):
        self._frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    @property
    def frozen(self):
        return self._frozen

    def freeze(self):
        self._frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")
        self._items[index] = value

    def __delitem__(self, index):
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")
        del self._items[index]

    def __len__(self):
        return self._items.__len__()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __eq__(self, other):
        return list(self) == other

    def __le__(self, other):
        return list(self) <= other

    def insert(self, pos, item):
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")
        self._items.insert(pos, item)

    def __repr__(self):
        return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"


PyFrozenList = FrozenList

try:
    from aiohttp._frozenlist import FrozenList as CFrozenList  # type: ignore

    if not NO_EXTENSIONS:
        FrozenList = CFrozenList  # type: ignore
except ImportError:  # pragma: no cover
    pass
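A small sketch of the freeze semantics above (illustrative only):

fl = FrozenList([1, 2])
fl.append(3)            # append() comes from the MutableSequence mixin
fl.freeze()
try:
    fl.append(4)
except RuntimeError:
    pass                # mutation after freeze() raises
assert list(fl) == [1, 2, 3]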
46
dist/ba_data/python-site-packages/aiohttp/frozenlist.pyi
vendored
Normal file
@ -0,0 +1,46 @@
from typing import (
    Generic,
    Iterable,
    Iterator,
    List,
    MutableSequence,
    Optional,
    TypeVar,
    Union,
    overload,
)

_T = TypeVar("_T")
_Arg = Union[List[_T], Iterable[_T]]

class FrozenList(MutableSequence[_T], Generic[_T]):
    def __init__(self, items: Optional[_Arg[_T]] = ...) -> None: ...
    @property
    def frozen(self) -> bool: ...
    def freeze(self) -> None: ...
    @overload
    def __getitem__(self, i: int) -> _T: ...
    @overload
    def __getitem__(self, s: slice) -> FrozenList[_T]: ...
    @overload
    def __setitem__(self, i: int, o: _T) -> None: ...
    @overload
    def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
    @overload
    def __delitem__(self, i: int) -> None: ...
    @overload
    def __delitem__(self, i: slice) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __reversed__(self) -> Iterator[_T]: ...
    def __eq__(self, other: object) -> bool: ...
    def __le__(self, other: FrozenList[_T]) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def __lt__(self, other: FrozenList[_T]) -> bool: ...
    def __ge__(self, other: FrozenList[_T]) -> bool: ...
    def __gt__(self, other: FrozenList[_T]) -> bool: ...
    def insert(self, pos: int, item: _T) -> None: ...
    def __repr__(self) -> str: ...

# types for C accelerators are the same
CFrozenList = PyFrozenList = FrozenList
108
dist/ba_data/python-site-packages/aiohttp/hdrs.py
vendored
Normal file
@ -0,0 +1,108 @@
"""HTTP Headers constants."""
|
||||
|
||||
# After changing the file content call ./tools/gen.py
|
||||
# to regenerate the headers parser
|
||||
|
||||
from multidict import istr
|
||||
|
||||
METH_ANY = "*"
|
||||
METH_CONNECT = "CONNECT"
|
||||
METH_HEAD = "HEAD"
|
||||
METH_GET = "GET"
|
||||
METH_DELETE = "DELETE"
|
||||
METH_OPTIONS = "OPTIONS"
|
||||
METH_PATCH = "PATCH"
|
||||
METH_POST = "POST"
|
||||
METH_PUT = "PUT"
|
||||
METH_TRACE = "TRACE"
|
||||
|
||||
METH_ALL = {
|
||||
METH_CONNECT,
|
||||
METH_HEAD,
|
||||
METH_GET,
|
||||
METH_DELETE,
|
||||
METH_OPTIONS,
|
||||
METH_PATCH,
|
||||
METH_POST,
|
||||
METH_PUT,
|
||||
METH_TRACE,
|
||||
}
|
||||
|
||||
|
||||
ACCEPT = istr("Accept")
|
||||
ACCEPT_CHARSET = istr("Accept-Charset")
|
||||
ACCEPT_ENCODING = istr("Accept-Encoding")
|
||||
ACCEPT_LANGUAGE = istr("Accept-Language")
|
||||
ACCEPT_RANGES = istr("Accept-Ranges")
|
||||
ACCESS_CONTROL_MAX_AGE = istr("Access-Control-Max-Age")
|
||||
ACCESS_CONTROL_ALLOW_CREDENTIALS = istr("Access-Control-Allow-Credentials")
|
||||
ACCESS_CONTROL_ALLOW_HEADERS = istr("Access-Control-Allow-Headers")
|
||||
ACCESS_CONTROL_ALLOW_METHODS = istr("Access-Control-Allow-Methods")
|
||||
ACCESS_CONTROL_ALLOW_ORIGIN = istr("Access-Control-Allow-Origin")
|
||||
ACCESS_CONTROL_EXPOSE_HEADERS = istr("Access-Control-Expose-Headers")
|
||||
ACCESS_CONTROL_REQUEST_HEADERS = istr("Access-Control-Request-Headers")
|
||||
ACCESS_CONTROL_REQUEST_METHOD = istr("Access-Control-Request-Method")
|
||||
AGE = istr("Age")
|
||||
ALLOW = istr("Allow")
|
||||
AUTHORIZATION = istr("Authorization")
|
||||
CACHE_CONTROL = istr("Cache-Control")
|
||||
CONNECTION = istr("Connection")
|
||||
CONTENT_DISPOSITION = istr("Content-Disposition")
|
||||
CONTENT_ENCODING = istr("Content-Encoding")
|
||||
CONTENT_LANGUAGE = istr("Content-Language")
|
||||
CONTENT_LENGTH = istr("Content-Length")
|
||||
CONTENT_LOCATION = istr("Content-Location")
|
||||
CONTENT_MD5 = istr("Content-MD5")
|
||||
CONTENT_RANGE = istr("Content-Range")
|
||||
CONTENT_TRANSFER_ENCODING = istr("Content-Transfer-Encoding")
|
||||
CONTENT_TYPE = istr("Content-Type")
|
||||
COOKIE = istr("Cookie")
|
||||
DATE = istr("Date")
|
||||
DESTINATION = istr("Destination")
|
||||
DIGEST = istr("Digest")
|
||||
ETAG = istr("Etag")
|
||||
EXPECT = istr("Expect")
|
||||
EXPIRES = istr("Expires")
|
||||
FORWARDED = istr("Forwarded")
|
||||
FROM = istr("From")
|
||||
HOST = istr("Host")
|
||||
IF_MATCH = istr("If-Match")
|
||||
IF_MODIFIED_SINCE = istr("If-Modified-Since")
|
||||
IF_NONE_MATCH = istr("If-None-Match")
|
||||
IF_RANGE = istr("If-Range")
|
||||
IF_UNMODIFIED_SINCE = istr("If-Unmodified-Since")
|
||||
KEEP_ALIVE = istr("Keep-Alive")
|
||||
LAST_EVENT_ID = istr("Last-Event-ID")
|
||||
LAST_MODIFIED = istr("Last-Modified")
|
||||
LINK = istr("Link")
|
||||
LOCATION = istr("Location")
|
||||
MAX_FORWARDS = istr("Max-Forwards")
|
||||
ORIGIN = istr("Origin")
|
||||
PRAGMA = istr("Pragma")
|
||||
PROXY_AUTHENTICATE = istr("Proxy-Authenticate")
|
||||
PROXY_AUTHORIZATION = istr("Proxy-Authorization")
|
||||
RANGE = istr("Range")
|
||||
REFERER = istr("Referer")
|
||||
RETRY_AFTER = istr("Retry-After")
|
||||
SEC_WEBSOCKET_ACCEPT = istr("Sec-WebSocket-Accept")
|
||||
SEC_WEBSOCKET_VERSION = istr("Sec-WebSocket-Version")
|
||||
SEC_WEBSOCKET_PROTOCOL = istr("Sec-WebSocket-Protocol")
|
||||
SEC_WEBSOCKET_EXTENSIONS = istr("Sec-WebSocket-Extensions")
|
||||
SEC_WEBSOCKET_KEY = istr("Sec-WebSocket-Key")
|
||||
SEC_WEBSOCKET_KEY1 = istr("Sec-WebSocket-Key1")
|
||||
SERVER = istr("Server")
|
||||
SET_COOKIE = istr("Set-Cookie")
|
||||
TE = istr("TE")
|
||||
TRAILER = istr("Trailer")
|
||||
TRANSFER_ENCODING = istr("Transfer-Encoding")
|
||||
UPGRADE = istr("Upgrade")
|
||||
URI = istr("URI")
|
||||
USER_AGENT = istr("User-Agent")
|
||||
VARY = istr("Vary")
|
||||
VIA = istr("Via")
|
||||
WANT_DIGEST = istr("Want-Digest")
|
||||
WARNING = istr("Warning")
|
||||
WWW_AUTHENTICATE = istr("WWW-Authenticate")
|
||||
X_FORWARDED_FOR = istr("X-Forwarded-For")
|
||||
X_FORWARDED_HOST = istr("X-Forwarded-Host")
|
||||
X_FORWARDED_PROTO = istr("X-Forwarded-Proto")
|
||||
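These istr constants are meant as keys of case-insensitive multidicts; a minimal sketch (illustrative only):

from multidict import CIMultiDict
from aiohttp import hdrs

h = CIMultiDict({hdrs.CONTENT_TYPE: "text/html"})
assert h["content-type"] == "text/html"   # lookup ignores case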
780
dist/ba_data/python-site-packages/aiohttp/helpers.py
vendored
Normal file
@ -0,0 +1,780 @@
"""Various helper functions"""
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import binascii
|
||||
import cgi
|
||||
import datetime
|
||||
import functools
|
||||
import inspect
|
||||
import netrc
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
import weakref
|
||||
from collections import namedtuple
|
||||
from contextlib import suppress
|
||||
from math import ceil
|
||||
from pathlib import Path
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generator,
|
||||
Generic,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Pattern,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from urllib.parse import quote
|
||||
from urllib.request import getproxies
|
||||
|
||||
import async_timeout
|
||||
import attr
|
||||
from multidict import MultiDict, MultiDictProxy
|
||||
from typing_extensions import Protocol
|
||||
from yarl import URL
|
||||
|
||||
from . import hdrs
|
||||
from .log import client_logger, internal_logger
|
||||
from .typedefs import PathLike # noqa
|
||||
|
||||
__all__ = ("BasicAuth", "ChainMapProxy")
|
||||
|
||||
PY_36 = sys.version_info >= (3, 6)
|
||||
PY_37 = sys.version_info >= (3, 7)
|
||||
PY_38 = sys.version_info >= (3, 8)
|
||||
|
||||
if not PY_37:
|
||||
import idna_ssl
|
||||
|
||||
idna_ssl.patch_match_hostname()
|
||||
|
||||
try:
|
||||
from typing import ContextManager
|
||||
except ImportError:
|
||||
from typing_extensions import ContextManager
|
||||
|
||||
|
||||
def all_tasks(
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
) -> Set["asyncio.Task[Any]"]:
|
||||
tasks = list(asyncio.Task.all_tasks(loop))
|
||||
return {t for t in tasks if not t.done()}
|
||||
|
||||
|
||||
if PY_37:
|
||||
all_tasks = getattr(asyncio, "all_tasks")
|
||||
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_S = TypeVar("_S")
|
||||
|
||||
|
||||
sentinel = object() # type: Any
|
||||
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
|
||||
|
||||
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
|
||||
# for compatibility with older versions
|
||||
DEBUG = getattr(sys.flags, "dev_mode", False) or (
|
||||
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
|
||||
) # type: bool
|
||||
|
||||
|
||||
CHAR = {chr(i) for i in range(0, 128)}
|
||||
CTL = {chr(i) for i in range(0, 32)} | {
|
||||
chr(127),
|
||||
}
|
||||
SEPARATORS = {
|
||||
"(",
|
||||
")",
|
||||
"<",
|
||||
">",
|
||||
"@",
|
||||
",",
|
||||
";",
|
||||
":",
|
||||
"\\",
|
||||
'"',
|
||||
"/",
|
||||
"[",
|
||||
"]",
|
||||
"?",
|
||||
"=",
|
||||
"{",
|
||||
"}",
|
||||
" ",
|
||||
chr(9),
|
||||
}
|
||||
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
||||
|
||||
|
||||
class noop:
|
||||
def __await__(self) -> Generator[None, None, None]:
|
||||
yield
|
||||
|
||||
|
||||
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
|
||||
"""Http basic authentication helper."""
|
||||
|
||||
def __new__(
|
||||
cls, login: str, password: str = "", encoding: str = "latin1"
|
||||
) -> "BasicAuth":
|
||||
if login is None:
|
||||
raise ValueError("None is not allowed as login value")
|
||||
|
||||
if password is None:
|
||||
raise ValueError("None is not allowed as password value")
|
||||
|
||||
if ":" in login:
|
||||
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
|
||||
|
||||
return super().__new__(cls, login, password, encoding)
|
||||
|
||||
@classmethod
|
||||
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
|
||||
"""Create a BasicAuth object from an Authorization HTTP header."""
|
||||
try:
|
||||
auth_type, encoded_credentials = auth_header.split(" ", 1)
|
||||
except ValueError:
|
||||
raise ValueError("Could not parse authorization header.")
|
||||
|
||||
if auth_type.lower() != "basic":
|
||||
raise ValueError("Unknown authorization method %s" % auth_type)
|
||||
|
||||
try:
|
||||
decoded = base64.b64decode(
|
||||
encoded_credentials.encode("ascii"), validate=True
|
||||
).decode(encoding)
|
||||
except binascii.Error:
|
||||
raise ValueError("Invalid base64 encoding.")
|
||||
|
||||
try:
|
||||
# RFC 2617 HTTP Authentication
|
||||
# https://www.ietf.org/rfc/rfc2617.txt
|
||||
# the colon must be present, but the username and password may be
|
||||
# otherwise blank.
|
||||
username, password = decoded.split(":", 1)
|
||||
except ValueError:
|
||||
raise ValueError("Invalid credentials.")
|
||||
|
||||
return cls(username, password, encoding=encoding)
|
||||
|
||||
@classmethod
|
||||
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
|
||||
"""Create BasicAuth from url."""
|
||||
if not isinstance(url, URL):
|
||||
raise TypeError("url should be yarl.URL instance")
|
||||
if url.user is None:
|
||||
return None
|
||||
return cls(url.user, url.password or "", encoding=encoding)
|
||||
|
||||
def encode(self) -> str:
|
||||
"""Encode credentials."""
|
||||
creds = (f"{self.login}:{self.password}").encode(self.encoding)
|
||||
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
|
||||
|
||||
|
||||
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
|
||||
auth = BasicAuth.from_url(url)
|
||||
if auth is None:
|
||||
return url, None
|
||||
else:
|
||||
return url.with_user(None), auth
|
||||
|
||||
|
||||
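A round-trip sketch for BasicAuth above (illustrative only):

from aiohttp import BasicAuth

auth = BasicAuth("user", "pass")
header = auth.encode()                     # 'Basic dXNlcjpwYXNz'
assert BasicAuth.decode(header) == BasicAuth("user", "pass")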
def netrc_from_env() -> Optional[netrc.netrc]:
    """Attempt to load the netrc file from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        netrc_path = home_dir / (
            "_netrc" if platform.system() == "Windows" else ".netrc"
        )

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_path.is_file():
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    proxy: URL
    proxy_auth: Optional[BasicAuth]


def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {k: URL(v) for k, v in getproxies().items() if k in ("http", "https")}
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme == "https":
            client_logger.warning("HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret


def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    if PY_37:
        return asyncio.current_task(loop=loop)
    else:
        return asyncio.Task.current_task(loop=loop)


def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop


def isasyncgenfunction(obj: Any) -> bool:
    func = getattr(inspect, "isasyncgenfunction", None)
    if func is not None:
        return func(obj)
    else:
        return False


@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    type: str
    subtype: str
    suffix: str
    parameters: "MultiDictProxy[str]"


@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    parts = mimetype.split(";")
    params = MultiDict()  # type: MultiDict[str]
    for item in parts[1:]:
        if not item:
            continue
        key, value = cast(
            Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
        )
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    mtype, stype = (
        cast(Tuple[str, str], fulltype.split("/", 1))
        if "/" in fulltype
        else (fulltype, "")
    )
    stype, suffix = (
        cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
    )

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )


def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    name = getattr(obj, "name", None)
    if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
        return Path(name).name
    return default


def content_disposition_header(
    disptype: str, quote_fields: bool = True, **params: str
) -> str:
    """Sets ``Content-Disposition`` header.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    params is a dict with disposition params.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter {!r}={!r}".format(key, val)
                )
            qval = quote(val, "") if quote_fields else val
            lparams.append((key, '"%s"' % qval))
            if key == "filename":
                lparams.append(("filename*", "utf-8''" + qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value


class _TSelf(Protocol):
    _cache: Dict[str, Any]


class reify(Generic[_T]):
    """Use as a class method decorator. It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.

    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf, value: _T) -> None:
        raise AttributeError("reified property is read-only")


reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore
except ImportError:
    pass
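A sketch of the pure-Python reify above (illustrative only; it relies on the instance having a `_cache` dict, per the `_TSelf` protocol):

class Box:
    def __init__(self) -> None:
        self._cache = {}        # reify stores computed values here

    @reify
    def answer(self) -> int:
        print("computed")       # printed only on the first access
        return 42

b = Box()
assert b.answer == 42           # computes and caches
assert b.answer == 42           # served from b._cache, no recompute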
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)


def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    if host is None:
        return False
    if isinstance(host, str):
        return bool(regex.match(host))
    elif isinstance(host, (bytes, bytearray, memoryview)):
        return bool(regexb.match(host))
    else:
        raise TypeError("{} [{}] is not a str or bytes".format(host, type(host)))


is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    return is_ipv4_address(host) or is_ipv6_address(host)
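The matchers above accept both text and binary hosts; a few sanity checks (illustrative only):

assert is_ip_address("127.0.0.1")      # IPv4, str
assert is_ip_address(b"127.0.0.1")     # IPv4, bytes
assert is_ip_address("::1")            # IPv6
assert not is_ip_address("example.com")
assert not is_ip_address(None)         # None is explicitly allowed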
def next_whole_second() -> datetime.datetime:
    """Return current time rounded up to the next whole second."""
    return datetime.datetime.now(datetime.timezone.utc).replace(
        microsecond=0
    ) + datetime.timedelta(seconds=0)


_cached_current_datetime = None  # type: Optional[int]
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        _monthname = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            _weekdayname[wd],
            day,
            _monthname[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime


def _weakref_handle(info):  # type: ignore
    ref, name = info
    ob = ref()
    if ob is not None:
        with suppress(Exception):
            getattr(ob, name)()


def weakref_handle(ob, name, timeout, loop):  # type: ignore
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout >= 5:
            when = ceil(when)

        return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))


def call_later(cb, timeout, loop):  # type: ignore
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout > 5:
            when = ceil(when)
        return loop.call_at(when, cb)


class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._callbacks = (
            []
        )  # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            if timeout >= 5:
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()


class BaseTimerContext(ContextManager["BaseTimerContext"]):
    pass


class TimerNoop(BaseTimerContext):
    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return


class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._tasks = []  # type: List[asyncio.Task[Any]]
        self._cancelled = False

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used inside a task"
            )

        if self._cancelled:
            task.cancel()
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True


class CeilTimeout(async_timeout.timeout):
    def __enter__(self) -> async_timeout.timeout:
        if self._timeout is not None:
            self._task = current_task(loop=self._loop)
            if self._task is None:
                raise RuntimeError(
                    "Timeout context manager should be used inside a task"
                )
            now = self._loop.time()
            delay = self._timeout
            when = now + delay
            if delay > 5:
                when = ceil(when)
            self._cancel_handler = self._loop.call_at(when, self._cancel_task)
        return self


class HeadersMixin:

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _content_type = None  # type: Optional[str]
    _content_dict = None  # type: Optional[Dict[str, str]]
    _stored_content_type = sentinel

    def _parse_content_type(self, raw: str) -> None:
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            self._content_type, self._content_dict = cgi.parse_header(raw)

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)  # type: ignore

        if content_length is not None:
            return int(content_length)
        else:
            return None


def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    if not fut.done():
        fut.set_result(result)


def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    if not fut.done():
        fut.set_exception(exc)


class ChainMapProxy(Mapping[str, Any]):
    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    def __getitem__(self, key: str) -> Any:
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    def get(self, key: str, default: Any = None) -> Any:
        return self[key] if key in self else default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))  # type: ignore

    def __iter__(self) -> Iterator[str]:
        d = {}  # type: Dict[str, Any]
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
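Lookup order for ChainMapProxy above follows the tuple order, first hit wins (illustrative sketch):

cfg = ChainMapProxy([{"debug": True}, {"debug": False, "port": 8080}])
assert cfg["debug"] is True     # first mapping shadows the second
assert cfg["port"] == 8080      # missing keys fall through
assert "host" not in cfg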
72
dist/ba_data/python-site-packages/aiohttp/http.py
vendored
Normal file
@ -0,0 +1,72 @@
import http.server
import sys
from typing import Mapping, Tuple

from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import (
    HeadersParser as HeadersParser,
    HttpParser as HttpParser,
    HttpRequestParser as HttpRequestParser,
    HttpResponseParser as HttpResponseParser,
    RawRequestMessage as RawRequestMessage,
    RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
    WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
    WS_KEY as WS_KEY,
    WebSocketError as WebSocketError,
    WebSocketReader as WebSocketReader,
    WebSocketWriter as WebSocketWriter,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
    ws_ext_gen as ws_ext_gen,
    ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    StreamWriter as StreamWriter,
)

__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)  # type: str

RESPONSES = (
    http.server.BaseHTTPRequestHandler.responses
)  # type: Mapping[int, Tuple[str, str]]
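RESPONSES simply re-exports the stdlib status table; a short sketch (illustrative only):

phrase, description = RESPONSES[404]
assert phrase == "Not Found"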
105
dist/ba_data/python-site-packages/aiohttp/http_exceptions.py
vendored
Normal file
@ -0,0 +1,105 @@
"""Low-level http related exceptions."""
|
||||
|
||||
|
||||
from typing import Optional, Union
|
||||
|
||||
from .typedefs import _CIMultiDict
|
||||
|
||||
__all__ = ("HttpProcessingError",)
|
||||
|
||||
|
||||
class HttpProcessingError(Exception):
|
||||
"""HTTP error.
|
||||
|
||||
Shortcut for raising HTTP errors with custom code, message and headers.
|
||||
|
||||
code: HTTP Error code.
|
||||
message: (optional) Error message.
|
||||
headers: (optional) Headers to be sent in response, a list of pairs
|
||||
"""
|
||||
|
||||
code = 0
|
||||
message = ""
|
||||
headers = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
code: Optional[int] = None,
|
||||
message: str = "",
|
||||
headers: Optional[_CIMultiDict] = None,
|
||||
) -> None:
|
||||
if code is not None:
|
||||
self.code = code
|
||||
self.headers = headers
|
||||
self.message = message
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.code}, message={self.message!r}"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__}: {self}>"
|
||||
|
||||
|
||||
class BadHttpMessage(HttpProcessingError):
|
||||
|
||||
code = 400
|
||||
message = "Bad Request"
|
||||
|
||||
def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
|
||||
super().__init__(message=message, headers=headers)
|
||||
self.args = (message,)
|
||||
|
||||
|
||||
class HttpBadRequest(BadHttpMessage):
|
||||
|
||||
code = 400
|
||||
message = "Bad Request"
|
||||
|
||||
|
||||
class PayloadEncodingError(BadHttpMessage):
|
||||
"""Base class for payload errors"""
|
||||
|
||||
|
||||
class ContentEncodingError(PayloadEncodingError):
|
||||
"""Content encoding error."""
|
||||
|
||||
|
||||
class TransferEncodingError(PayloadEncodingError):
|
||||
"""transfer encoding error."""
|
||||
|
||||
|
||||
class ContentLengthError(PayloadEncodingError):
|
||||
"""Not enough data for satisfy content length header."""
|
||||
|
||||
|
||||
class LineTooLong(BadHttpMessage):
|
||||
def __init__(
|
||||
self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
|
||||
) -> None:
|
||||
super().__init__(
|
||||
f"Got more than {limit} bytes ({actual_size}) when reading {line}."
|
||||
)
|
||||
self.args = (line, limit, actual_size)
|
||||
|
||||
|
||||
class InvalidHeader(BadHttpMessage):
|
||||
def __init__(self, hdr: Union[bytes, str]) -> None:
|
||||
if isinstance(hdr, bytes):
|
||||
hdr = hdr.decode("utf-8", "surrogateescape")
|
||||
super().__init__(f"Invalid HTTP Header: {hdr}")
|
||||
self.hdr = hdr
|
||||
self.args = (hdr,)
|
||||
|
||||
|
||||
class BadStatusLine(BadHttpMessage):
|
||||
def __init__(self, line: str = "") -> None:
|
||||
if not isinstance(line, str):
|
||||
line = repr(line)
|
||||
super().__init__(f"Bad status line {line!r}")
|
||||
self.args = (line,)
|
||||
self.line = line
|
||||
|
||||
|
||||
class InvalidURLError(BadHttpMessage):
|
||||
pass
|
||||
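A sketch of how the exception above renders (illustrative only):

err = HttpProcessingError(code=400, message="bad input")
assert str(err) == "400, message='bad input'"
assert repr(err) == "<HttpProcessingError: 400, message='bad input'>"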
901
dist/ba_data/python-site-packages/aiohttp/http_parser.py
vendored
Normal file
@ -0,0 +1,901 @@
import abc
import asyncio
import collections
import re
import string
import zlib
from enum import IntEnum
from typing import Any, List, Optional, Tuple, Type, Union

from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL

from . import hdrs
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS, BaseTimerContext
from .http_exceptions import (
    BadStatusLine,
    ContentEncodingError,
    ContentLengthError,
    InvalidHeader,
    LineTooLong,
    TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .log import internal_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders

try:
    import brotli

    HAS_BROTLI = True
except ImportError:  # pragma: no cover
    HAS_BROTLI = False


__all__ = (
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
)

ASCIISET = set(string.printable)

# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
#
#     method = token
#     tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#             "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
#     token = 1*tchar
METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE = re.compile(r"HTTP/(\d+).(\d+)")
HDRRE = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")

RawRequestMessage = collections.namedtuple(
    "RawRequestMessage",
    [
        "method",
        "path",
        "version",
        "headers",
        "raw_headers",
        "should_close",
        "compression",
        "upgrade",
        "chunked",
        "url",
    ],
)

RawResponseMessage = collections.namedtuple(
    "RawResponseMessage",
    [
        "version",
        "code",
        "reason",
        "headers",
        "raw_headers",
        "should_close",
        "compression",
        "upgrade",
        "chunked",
    ],
)


class ParseState(IntEnum):

    PARSE_NONE = 0
    PARSE_LENGTH = 1
    PARSE_CHUNKED = 2
    PARSE_UNTIL_EOF = 3


class ChunkState(IntEnum):
    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4


class HeadersParser:
    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        headers = CIMultiDict()  # type: CIMultiDict[str]
        raw_headers = []

        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            bname = bname.strip(b" \t")
            bvalue = bvalue.lstrip()
            if HDRRE.search(bname):
                raise InvalidHeader(bname)
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "xmlcharrefreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = line and line[0] in (32, 9)  # (' ', '\t')

            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "xmlcharrefreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "xmlcharrefreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip()
            name = bname.decode("utf-8", "surrogateescape")
            value = bvalue.decode("utf-8", "surrogateescape")

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
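A sketch of HeadersParser above (illustrative only): lines[0] is the start line and is skipped; an empty line terminates the headers.

parser = HeadersParser()
headers, raw = parser.parse_headers(
    [b"GET / HTTP/1.1", b"Host: example.com", b"X-Num: 1", b""]
)
assert headers["host"] == "example.com"   # CIMultiDict lookup is case-insensitive
assert raw == ((b"Host", b"example.com"), (b"X-Num", b"1"))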

class HttpParser(abc.ABC):
    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2 ** 16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.readall = readall
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        self._lines = []  # type: List[bytes]
        self._tail = b""
        self._upgraded = False
        self._payload = None
        self._payload_parser = None  # type: Optional[HttpPayloadParser]
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> Any:
        pass
def feed_eof(self) -> Any:
|
||||
if self._payload_parser is not None:
|
||||
self._payload_parser.feed_eof()
|
||||
self._payload_parser = None
|
||||
else:
|
||||
# try to extract partial message
|
||||
if self._tail:
|
||||
self._lines.append(self._tail)
|
||||
|
||||
if self._lines:
|
||||
if self._lines[-1] != "\r\n":
|
||||
self._lines.append(b"")
|
||||
try:
|
||||
return self.parse_message(self._lines)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def feed_data(
|
||||
self,
|
||||
data: bytes,
|
||||
SEP: bytes = b"\r\n",
|
||||
EMPTY: bytes = b"",
|
||||
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
|
||||
METH_CONNECT: str = hdrs.METH_CONNECT,
|
||||
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
|
||||
) -> Tuple[List[Any], bool, bytes]:
|
||||
|
||||
messages = []
|
||||
|
||||
if self._tail:
|
||||
data, self._tail = self._tail + data, b""
|
||||
|
||||
data_len = len(data)
|
||||
start_pos = 0
|
||||
loop = self.loop
|
||||
|
||||
while start_pos < data_len:
|
||||
|
||||
# read HTTP message (request/response line + headers), \r\n\r\n
|
||||
# and split by lines
|
||||
if self._payload_parser is None and not self._upgraded:
|
||||
pos = data.find(SEP, start_pos)
|
||||
# consume \r\n
|
||||
if pos == start_pos and not self._lines:
|
||||
start_pos = pos + 2
|
||||
continue
|
||||
|
||||
if pos >= start_pos:
|
||||
# line found
|
||||
self._lines.append(data[start_pos:pos])
|
||||
start_pos = pos + 2
|
||||
|
||||
# \r\n\r\n found
|
||||
if self._lines[-1] == EMPTY:
|
||||
try:
|
||||
msg = self.parse_message(self._lines)
|
||||
finally:
|
||||
self._lines.clear()
|
||||
|
||||
# payload length
|
||||
length = msg.headers.get(CONTENT_LENGTH)
|
||||
if length is not None:
|
||||
try:
|
||||
length = int(length)
|
||||
except ValueError:
|
||||
raise InvalidHeader(CONTENT_LENGTH)
|
||||
if length < 0:
|
||||
raise InvalidHeader(CONTENT_LENGTH)
|
||||
|
||||
# do not support old websocket spec
|
||||
if SEC_WEBSOCKET_KEY1 in msg.headers:
|
||||
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
||||
|
||||
self._upgraded = msg.upgrade
|
||||
|
||||
method = getattr(msg, "method", self.method)
|
||||
|
||||
assert self.protocol is not None
|
||||
# calculate payload
|
||||
if (
|
||||
(length is not None and length > 0)
|
||||
or msg.chunked
|
||||
and not msg.upgrade
|
||||
):
|
||||
payload = StreamReader(
|
||||
self.protocol,
|
||||
timer=self.timer,
|
||||
loop=loop,
|
||||
limit=self._limit,
|
||||
)
|
||||
payload_parser = HttpPayloadParser(
|
||||
payload,
|
||||
length=length,
|
||||
chunked=msg.chunked,
|
||||
method=method,
|
||||
compression=msg.compression,
|
||||
code=self.code,
|
||||
readall=self.readall,
|
||||
response_with_body=self.response_with_body,
|
||||
auto_decompress=self._auto_decompress,
|
||||
)
|
||||
if not payload_parser.done:
|
||||
self._payload_parser = payload_parser
|
||||
elif method == METH_CONNECT:
|
||||
payload = StreamReader(
|
||||
self.protocol,
|
||||
timer=self.timer,
|
||||
loop=loop,
|
||||
limit=self._limit,
|
||||
)
|
||||
self._upgraded = True
|
||||
self._payload_parser = HttpPayloadParser(
|
||||
payload,
|
||||
method=msg.method,
|
||||
compression=msg.compression,
|
||||
readall=True,
|
||||
auto_decompress=self._auto_decompress,
|
||||
)
|
||||
else:
|
||||
if (
|
||||
getattr(msg, "code", 100) >= 199
|
||||
and length is None
|
||||
and self.read_until_eof
|
||||
):
|
||||
payload = StreamReader(
|
||||
self.protocol,
|
||||
timer=self.timer,
|
||||
loop=loop,
|
||||
limit=self._limit,
|
||||
)
|
||||
payload_parser = HttpPayloadParser(
|
||||
payload,
|
||||
length=length,
|
||||
chunked=msg.chunked,
|
||||
method=method,
|
||||
compression=msg.compression,
|
||||
code=self.code,
|
||||
readall=True,
|
||||
response_with_body=self.response_with_body,
|
||||
auto_decompress=self._auto_decompress,
|
||||
)
|
||||
if not payload_parser.done:
|
||||
self._payload_parser = payload_parser
|
||||
else:
|
||||
payload = EMPTY_PAYLOAD # type: ignore
|
||||
|
||||
messages.append((msg, payload))
|
||||
else:
|
||||
self._tail = data[start_pos:]
|
||||
data = EMPTY
|
||||
break
|
||||
|
||||
# no parser, just store
|
||||
elif self._payload_parser is None and self._upgraded:
|
||||
assert not self._lines
|
||||
break
|
||||
|
||||
# feed payload
|
||||
elif data and start_pos < data_len:
|
||||
assert not self._lines
|
||||
assert self._payload_parser is not None
|
||||
try:
|
||||
eof, data = self._payload_parser.feed_data(data[start_pos:])
|
||||
except BaseException as exc:
|
||||
if self.payload_exception is not None:
|
||||
self._payload_parser.payload.set_exception(
|
||||
self.payload_exception(str(exc))
|
||||
)
|
||||
else:
|
||||
self._payload_parser.payload.set_exception(exc)
|
||||
|
||||
eof = True
|
||||
data = b""
|
||||
|
||||
if eof:
|
||||
start_pos = 0
|
||||
data_len = len(data)
|
||||
self._payload_parser = None
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
if data and start_pos < data_len:
|
||||
data = data[start_pos:]
|
||||
else:
|
||||
data = EMPTY
|
||||
|
||||
return messages, self._upgraded, data
|
||||
|
||||
def parse_headers(
|
||||
self, lines: List[bytes]
|
||||
) -> Tuple[
|
||||
"CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
|
||||
]:
|
||||
"""Parses RFC 5322 headers from a stream.
|
||||
|
||||
Line continuations are supported. Returns list of header name
|
||||
and value pairs. Header name is in upper case.
|
||||
"""
|
||||
headers, raw_headers = self._headers_parser.parse_headers(lines)
|
||||
close_conn = None
|
||||
encoding = None
|
||||
upgrade = False
|
||||
chunked = False
|
||||
|
||||
# keep-alive
|
||||
conn = headers.get(hdrs.CONNECTION)
|
||||
if conn:
|
||||
v = conn.lower()
|
||||
if v == "close":
|
||||
close_conn = True
|
||||
elif v == "keep-alive":
|
||||
close_conn = False
|
||||
elif v == "upgrade":
|
||||
upgrade = True
|
||||
|
||||
# encoding
|
||||
enc = headers.get(hdrs.CONTENT_ENCODING)
|
||||
if enc:
|
||||
enc = enc.lower()
|
||||
if enc in ("gzip", "deflate", "br"):
|
||||
encoding = enc
|
||||
|
||||
# chunking
|
||||
te = headers.get(hdrs.TRANSFER_ENCODING)
|
||||
if te and "chunked" in te.lower():
|
||||
chunked = True
|
||||
|
||||
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
|
||||
|
||||
def set_upgraded(self, val: bool) -> None:
|
||||
"""Set connection upgraded (to websocket) mode.
|
||||
:param bool val: new state.
|
||||
"""
|
||||
self._upgraded = val
|
||||
|
||||
|
||||
class HttpRequestParser(HttpParser):
    """Read request status line.

    A .http_exceptions.BadStatusLine exception may be raised on any
    error in the status line. Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> Any:
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(None, 2)
        except ValueError:
            raise BadStatusLine(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        path_part, _hash_separator, url_fragment = path.partition("#")
        path_part, _question_mark_separator, qs_part = path_part.partition("?")

        # method
        if not METHRE.match(method):
            raise BadStatusLine(method)

        # version
        try:
            if version.startswith("HTTP/"):
                n1, n2 = version[5:].split(".", 1)
                version_o = HttpVersion(int(n1), int(n2))
            else:
                raise BadStatusLine(version)
        except Exception:
            raise BadStatusLine(version)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # no Connection header in the request
            if version_o <= HttpVersion10:  # HTTP/1.0 closes by default
                close = True
            else:  # HTTP/1.1 keeps the connection open by default
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does; otherwise the same HTTP Request-Line input
            # NOTE: produces different `yarl.URL()` objects
            URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            ),
        )


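# --- Editorial illustration (not part of the vendored aiohttp file). A
# hedged sketch: parse_message() can be exercised directly on pre-split
# lines (feed_data() normally does the splitting). The request is invented;
# the default argument pins the pure-Python class defined above, before the
# optional C extension rebinds the module-level name at the bottom of this
# file.
def _demo_parse_request_message(_parser_cls=HttpRequestParser) -> None:
    msg = _parser_cls().parse_message(
        [b"GET /index.html?q=1 HTTP/1.1", b"Host: example.com", b""]
    )
    assert msg.method == "GET"
    assert msg.version == HttpVersion(1, 1)
    assert msg.url.query_string == "q=1"

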
class HttpResponseParser(HttpParser):
    """Read response status line and headers.

    BadStatusLine may be raised on any error in the status line.
    Returns RawResponseMessage."""

    def parse_message(self, lines: List[bytes]) -> Any:
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(None, 1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(None, 1)
        except ValueError:
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.match(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit number
        try:
            status_i = int(status)
        except ValueError:
            raise BadStatusLine(line) from None

        if status_i > 999:
            raise BadStatusLine(line)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            close = version_o <= HttpVersion10

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )


class HttpPayloadParser:
    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        response_with_body: bool = True,
        auto_decompress: bool = True,
    ) -> None:
        self._length = 0
        self._type = ParseState.PARSE_NONE
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload = DeflateBuffer(
                payload, compression
            )  # type: Union[StreamReader, DeflateBuffer]
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True

        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True
        else:
            if readall and code != 204:
                self._type = ParseState.PARSE_UNTIL_EOF
            elif method in ("PUT", "POST"):
                internal_logger.warning(  # pragma: no cover
                    "Content-Length or Transfer-Encoding header is required"
                )
                self._type = ParseState.PARSE_NONE
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data to satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data to satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                        else:
                            size_b = chunk[:pos]

                        try:
                            size = int(bytes(size_b), 16)
                        except ValueError:
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            self.payload.set_exception(exc)
                            raise exc from None

                        chunk = chunk[pos + 2 :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if chunk[:2] == SEP:
                        chunk = chunk[2:]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if the stream does not contain a trailer, after 0\r\n
                # we should get another \r\n; otherwise
                # trailers need to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[:2]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[2:]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + 2 :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""


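# --- Editorial illustration (not part of the vendored aiohttp file). A
# standalone sketch of the chunked wire format the PARSE_CHUNKED branch
# above consumes: a hex size line, CRLF, that many payload bytes, CRLF,
# repeated until a zero-size chunk followed by a final CRLF.
def _demo_chunked_wire_format() -> None:
    wire = b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"
    body = bytearray()
    while True:
        size_line, _, wire = wire.partition(b"\r\n")
        size = int(size_line.split(b";")[0], 16)  # strip chunk-extensions
        if size == 0:  # eof marker, as in the parser above
            break
        body.extend(wire[:size])
        wire = wire[size + 2 :]  # skip the payload and its trailing CRLF
    assert bytes(body) == b"Wikipedia"

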
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        self.out = out
        self.size = 0
        self.encoding = encoding
        self._started_decoding = False

        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `brotlipy`"
                )
            self.decompressor = brotli.Decompressor()
        else:
            zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
            self.decompressor = zlib.decompressobj(wbits=zlib_mode)

    def set_exception(self, exc: BaseException) -> None:
        self.out.set_exception(exc)

    def feed_data(self, chunk: bytes, size: int) -> None:
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = window size
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)

        try:
            chunk = self.decompressor.decompress(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()


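# --- Editorial illustration (not part of the vendored aiohttp file). A
# sketch of the wbits values DeflateBuffer relies on: 16 + MAX_WBITS expects
# a gzip wrapper, MAX_WBITS a zlib (RFC 1950) wrapper, and -MAX_WBITS a raw
# deflate stream (the fallback used above for non-compliant bodies).
def _demo_wbits_selection() -> None:
    import gzip

    data = b"x" * 100
    gz = zlib.decompressobj(wbits=16 + zlib.MAX_WBITS)
    assert gz.decompress(gzip.compress(data)) == data

    # stripping the 2-byte zlib header and 4-byte Adler-32 trailer
    # leaves a raw deflate stream
    raw = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
    assert raw.decompress(zlib.compress(data)[2:-4]) == data

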
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        from ._http_parser import (  # type: ignore
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass
698
dist/ba_data/python-site-packages/aiohttp/http_websocket.py
vendored
Normal file
@@ -0,0 +1,698 @@
"""WebSocket protocol versions 13 and 8."""
|
||||
|
||||
import asyncio
|
||||
import collections
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import sys
|
||||
import zlib
|
||||
from enum import IntEnum
|
||||
from struct import Struct
|
||||
from typing import Any, Callable, List, Optional, Tuple, Union
|
||||
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import NO_EXTENSIONS
|
||||
from .streams import DataQueue
|
||||
|
||||
__all__ = (
|
||||
"WS_CLOSED_MESSAGE",
|
||||
"WS_CLOSING_MESSAGE",
|
||||
"WS_KEY",
|
||||
"WebSocketReader",
|
||||
"WebSocketWriter",
|
||||
"WSMessage",
|
||||
"WebSocketError",
|
||||
"WSMsgType",
|
||||
"WSCloseCode",
|
||||
)
|
||||
|
||||
|
||||
class WSCloseCode(IntEnum):
|
||||
OK = 1000
|
||||
GOING_AWAY = 1001
|
||||
PROTOCOL_ERROR = 1002
|
||||
UNSUPPORTED_DATA = 1003
|
||||
INVALID_TEXT = 1007
|
||||
POLICY_VIOLATION = 1008
|
||||
MESSAGE_TOO_BIG = 1009
|
||||
MANDATORY_EXTENSION = 1010
|
||||
INTERNAL_ERROR = 1011
|
||||
SERVICE_RESTART = 1012
|
||||
TRY_AGAIN_LATER = 1013
|
||||
|
||||
|
||||
ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode}
|
||||
|
||||
|
||||
class WSMsgType(IntEnum):
|
||||
# websocket spec types
|
||||
CONTINUATION = 0x0
|
||||
TEXT = 0x1
|
||||
BINARY = 0x2
|
||||
PING = 0x9
|
||||
PONG = 0xA
|
||||
CLOSE = 0x8
|
||||
|
||||
# aiohttp specific types
|
||||
CLOSING = 0x100
|
||||
CLOSED = 0x101
|
||||
ERROR = 0x102
|
||||
|
||||
text = TEXT
|
||||
binary = BINARY
|
||||
ping = PING
|
||||
pong = PONG
|
||||
close = CLOSE
|
||||
closing = CLOSING
|
||||
closed = CLOSED
|
||||
error = ERROR
|
||||
|
||||
|
||||
WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
||||
|
||||
|
||||
UNPACK_LEN2 = Struct("!H").unpack_from
|
||||
UNPACK_LEN3 = Struct("!Q").unpack_from
|
||||
UNPACK_CLOSE_CODE = Struct("!H").unpack
|
||||
PACK_LEN1 = Struct("!BB").pack
|
||||
PACK_LEN2 = Struct("!BBH").pack
|
||||
PACK_LEN3 = Struct("!BBQ").pack
|
||||
PACK_CLOSE_CODE = Struct("!H").pack
|
||||
MSG_SIZE = 2 ** 14
|
||||
DEFAULT_LIMIT = 2 ** 16
|
||||
|
||||
|
||||
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
|
||||
|
||||
|
||||
class WSMessage(_WSMessageBase):
|
||||
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
|
||||
"""Return parsed JSON data.
|
||||
|
||||
.. versionadded:: 0.22
|
||||
"""
|
||||
return loads(self.data)
|
||||
|
||||
|
||||
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
|
||||
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
||||
|
||||
|
||||
class WebSocketError(Exception):
|
||||
"""WebSocket protocol parser error."""
|
||||
|
||||
def __init__(self, code: int, message: str) -> None:
|
||||
self.code = code
|
||||
super().__init__(code, message)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.args[1]
|
||||
|
||||
|
||||
class WSHandshakeError(Exception):
|
||||
"""WebSocket protocol handshake error."""
|
||||
|
||||
|
||||
native_byteorder = sys.byteorder
|
||||
|
||||
|
||||
# Used by _websocket_mask_python
|
||||
_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]
|
||||
|
||||
|
||||
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
|
||||
"""Websocket masking function.
|
||||
|
||||
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
|
||||
object of any length. The contents of `data` are masked with `mask`,
|
||||
as specified in section 5.3 of RFC 6455.
|
||||
|
||||
Note that this function mutates the `data` argument.
|
||||
|
||||
This pure-python implementation may be replaced by an optimized
|
||||
version when available.
|
||||
|
||||
"""
|
||||
assert isinstance(data, bytearray), data
|
||||
assert len(mask) == 4, mask
|
||||
|
||||
if data:
|
||||
a, b, c, d = (_XOR_TABLE[n] for n in mask)
|
||||
data[::4] = data[::4].translate(a)
|
||||
data[1::4] = data[1::4].translate(b)
|
||||
data[2::4] = data[2::4].translate(c)
|
||||
data[3::4] = data[3::4].translate(d)
|
||||
|
||||
|
||||
if NO_EXTENSIONS: # pragma: no cover
|
||||
_websocket_mask = _websocket_mask_python
|
||||
else:
|
||||
try:
|
||||
from ._websocket import _websocket_mask_cython # type: ignore
|
||||
|
||||
_websocket_mask = _websocket_mask_cython
|
||||
except ImportError: # pragma: no cover
|
||||
_websocket_mask = _websocket_mask_python
|
||||
|
||||
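# --- Editorial illustration (not part of the vendored aiohttp file): XOR
# masking is an involution, so applying the same 4-byte mask twice restores
# the original payload.
def _demo_mask_roundtrip() -> None:
    payload = bytearray(b"hello websocket")
    _websocket_mask_python(b"\x01\x02\x03\x04", payload)
    _websocket_mask_python(b"\x01\x02\x03\x04", payload)
    assert payload == b"hello websocket"

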
_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])


_WS_EXT_RE = re.compile(
    r"^(?:;\s*(?:"
    r"(server_no_context_takeover)|"
    r"(client_no_context_takeover)|"
    r"(server_max_window_bits(?:=(\d+))?)|"
    r"(client_max_window_bits(?:=(\d+))?)))*$"
)

_WS_EXT_RE_SPLIT = re.compile(r"permessage-deflate([^,]+)?")


def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # Return compress = 15 for a bare `permessage-deflate`
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # The server never fails to detect the compress handshake.
                # The server does not need to send max wbits to the client.
                if match.group(4):
                    compress = int(match.group(4))
                    # Group 3 must match if group 4 matches.
                    # wbits=8 is not supported by zlib.
                    # If the compression level is not supported,
                    # CONTINUE to the next extension.
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex groups 5 & 6 for client_max_window_bits
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group 5 must match if group 6 matches.
                    # wbits=8 is not supported by zlib.
                    # If the compression level is not supported,
                    # FAIL the parse progress.
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex groups 3 & 4 for server_max_window_bits
                break
        # Fail if on the client side and nothing matched
        elif not isserver:
            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))

    return compress, notakeover


def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    # client_notakeover=False not used for server
    # wbits=8 is not supported by zlib
    if compress < 9 or compress > 15:
        raise ValueError(
            "Compress wbits must be between 9 and 15; zlib does not support wbits=8"
        )
    enabledext = ["permessage-deflate"]
    if not isserver:
        enabledext.append("client_max_window_bits")

    if compress < 15:
        enabledext.append("server_max_window_bits=" + str(compress))
    if server_notakeover:
        enabledext.append("server_no_context_takeover")
    # if client_notakeover:
    #     enabledext.append('client_no_context_takeover')
    return "; ".join(enabledext)


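# --- Editorial illustration (not part of the vendored aiohttp file). A
# hedged sketch of a handshake round trip: a client offer built by
# ws_ext_gen() above is parsed on the server side by ws_ext_parse().
def _demo_ws_ext_roundtrip() -> None:
    offer = ws_ext_gen(compress=12, isserver=False)
    # offer == "permessage-deflate; client_max_window_bits; server_max_window_bits=12"
    compress, notakeover = ws_ext_parse(offer, isserver=True)
    assert (compress, notakeover) == (12, False)

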
class WSParserState(IntEnum):
    READ_HEADER = 1
    READ_PAYLOAD_LENGTH = 2
    READ_PAYLOAD_MASK = 3
    READ_PAYLOAD = 4


class WebSocketReader:
    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        self.queue = queue
        self._max_msg_size = max_msg_size

        self._exc = None  # type: Optional[BaseException]
        self._partial = bytearray()
        self._state = WSParserState.READ_HEADER

        self._opcode = None  # type: Optional[int]
        self._frame_fin = False
        self._frame_opcode = None  # type: Optional[int]
        self._frame_payload = bytearray()

        self._tail = b""
        self._has_mask = False
        self._frame_mask = None  # type: Optional[bytes]
        self._payload_length = 0
        self._payload_length_flag = 0
        self._compressed = None  # type: Optional[bool]
        self._decompressobj = None  # type: Any  # zlib.decompressobj actually
        self._compress = compress

    def feed_eof(self) -> None:
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        if self._exc:
            return True, data

        try:
            return self._feed_data(data)
        except Exception as exc:
            self._exc = exc
            self.queue.set_exception(exc)
            return True, b""

    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if compressed and not self._decompressobj:
                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # the previous frame was not finished,
                    # so we should get a continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompression must be done after all frames of the
                    # message have been received.
                    if compressed:
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket."""
        frames = []
        if self._tail:
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    #           / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be larger than 125 bytes",
                        )

                    # Set compress status if last package is FIN
                    # OR set compress status if this is first fragment
                    # Raise error if not first fragment with rsv1 = 0x1
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames


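# --- Editorial illustration (not part of the vendored aiohttp file): the
# two-byte header layout parse_frame() reads above, built with the same
# PACK_LEN1 helper WebSocketWriter uses below for payloads under 126 bytes.
def _demo_frame_header() -> None:
    payload = b"hello"
    fin_bit = 0x80
    frame = PACK_LEN1(fin_bit | WSMsgType.TEXT, len(payload)) + payload  # unmasked
    assert frame == b"\x81\x05hello"

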
class WebSocketWriter:
    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: Any = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        self._output_size = 0
        self._compressobj = None  # type: Any  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Do small packets need to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(
                        level=zlib.Z_BEST_SPEED, wbits=-self.compress
                    )
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _write(self, data: bytes) -> None:
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: bytes = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: bytes = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: bytes = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            self._closing = True
182
dist/ba_data/python-site-packages/aiohttp/http_writer.py
vendored
Normal file
@@ -0,0 +1,182 @@
"""Http related parsers and protocol."""
|
||||
|
||||
import asyncio
|
||||
import collections
|
||||
import zlib
|
||||
from typing import Any, Awaitable, Callable, Optional, Union # noqa
|
||||
|
||||
from multidict import CIMultiDict
|
||||
|
||||
from .abc import AbstractStreamWriter
|
||||
from .base_protocol import BaseProtocol
|
||||
from .helpers import NO_EXTENSIONS
|
||||
|
||||
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
|
||||
|
||||
HttpVersion = collections.namedtuple("HttpVersion", ["major", "minor"])
|
||||
HttpVersion10 = HttpVersion(1, 0)
|
||||
HttpVersion11 = HttpVersion(1, 1)
|
||||
|
||||
|
||||
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
|
||||
|
||||
|
||||
class StreamWriter(AbstractStreamWriter):
|
||||
def __init__(
|
||||
self,
|
||||
protocol: BaseProtocol,
|
||||
loop: asyncio.AbstractEventLoop,
|
||||
on_chunk_sent: _T_OnChunkSent = None,
|
||||
) -> None:
|
||||
self._protocol = protocol
|
||||
self._transport = protocol.transport
|
||||
|
||||
self.loop = loop
|
||||
self.length = None
|
||||
self.chunked = False
|
||||
self.buffer_size = 0
|
||||
self.output_size = 0
|
||||
|
||||
self._eof = False
|
||||
self._compress = None # type: Any
|
||||
self._drain_waiter = None
|
||||
|
||||
self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent
|
||||
|
||||
@property
|
||||
def transport(self) -> Optional[asyncio.Transport]:
|
||||
return self._transport
|
||||
|
||||
@property
|
||||
def protocol(self) -> BaseProtocol:
|
||||
return self._protocol
|
||||
|
||||
def enable_chunking(self) -> None:
|
||||
self.chunked = True
|
||||
|
||||
def enable_compression(self, encoding: str = "deflate") -> None:
|
||||
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
|
||||
self._compress = zlib.compressobj(wbits=zlib_mode)
|
||||
|
||||
def _write(self, chunk: bytes) -> None:
|
||||
size = len(chunk)
|
||||
self.buffer_size += size
|
||||
self.output_size += size
|
||||
|
||||
if self._transport is None or self._transport.is_closing():
|
||||
raise ConnectionResetError("Cannot write to closing transport")
|
||||
self._transport.write(chunk)
|
||||
|
||||
async def write(
|
||||
self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
|
||||
) -> None:
|
||||
"""Writes chunk of data to a stream.
|
||||
|
||||
write_eof() indicates end of stream.
|
||||
writer can't be used after write_eof() method being called.
|
||||
write() return drain future.
|
||||
"""
|
||||
if self._on_chunk_sent is not None:
|
||||
await self._on_chunk_sent(chunk)
|
||||
|
||||
if isinstance(chunk, memoryview):
|
||||
if chunk.nbytes != len(chunk):
|
||||
# just reshape it
|
||||
chunk = chunk.cast("c")
|
||||
|
||||
if self._compress is not None:
|
||||
chunk = self._compress.compress(chunk)
|
||||
if not chunk:
|
||||
return
|
||||
|
||||
if self.length is not None:
|
||||
chunk_len = len(chunk)
|
||||
if self.length >= chunk_len:
|
||||
self.length = self.length - chunk_len
|
||||
else:
|
||||
chunk = chunk[: self.length]
|
||||
self.length = 0
|
||||
if not chunk:
|
||||
return
|
||||
|
||||
if chunk:
|
||||
if self.chunked:
|
||||
chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
|
||||
chunk = chunk_len_pre + chunk + b"\r\n"
|
||||
|
||||
self._write(chunk)
|
||||
|
||||
if self.buffer_size > LIMIT and drain:
|
||||
self.buffer_size = 0
|
||||
await self.drain()
|
||||
|
||||
async def write_headers(
|
||||
self, status_line: str, headers: "CIMultiDict[str]"
|
||||
) -> None:
|
||||
"""Write request/response status and headers."""
|
||||
# status + headers
|
||||
buf = _serialize_headers(status_line, headers)
|
||||
self._write(buf)
|
||||
|
||||
async def write_eof(self, chunk: bytes = b"") -> None:
|
||||
if self._eof:
|
||||
return
|
||||
|
||||
if chunk and self._on_chunk_sent is not None:
|
||||
await self._on_chunk_sent(chunk)
|
||||
|
||||
if self._compress:
|
||||
if chunk:
|
||||
chunk = self._compress.compress(chunk)
|
||||
|
||||
chunk = chunk + self._compress.flush()
|
||||
if chunk and self.chunked:
|
||||
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
|
||||
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
|
||||
else:
|
||||
if self.chunked:
|
||||
if chunk:
|
||||
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
|
||||
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
|
||||
else:
|
||||
chunk = b"0\r\n\r\n"
|
||||
|
||||
if chunk:
|
||||
self._write(chunk)
|
||||
|
||||
await self.drain()
|
||||
|
||||
self._eof = True
|
||||
self._transport = None
|
||||
|
||||
async def drain(self) -> None:
|
||||
"""Flush the write buffer.
|
||||
|
||||
The intended use is to write
|
||||
|
||||
await w.write(data)
|
||||
await w.drain()
|
||||
"""
|
||||
if self._protocol.transport is not None:
|
||||
await self._protocol._drain_helper()
|
||||
|
||||
|
||||
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    line = (
        status_line
        + "\r\n"
        + "".join([k + ": " + v + "\r\n" for k, v in headers.items()])
    )
    return line.encode("utf-8") + b"\r\n"


_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass
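# --- Editorial illustration (not part of the vendored aiohttp file): the
# exact bytes _py_serialize_headers() above produces for a tiny response.
def _demo_serialize_headers() -> None:
    headers = CIMultiDict({"Content-Type": "text/plain", "Content-Length": "2"})
    buf = _py_serialize_headers("HTTP/1.1 200 OK", headers)
    assert buf == (
        b"HTTP/1.1 200 OK\r\n"
        b"Content-Type: text/plain\r\n"
        b"Content-Length: 2\r\n"
        b"\r\n"
    )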
45
dist/ba_data/python-site-packages/aiohttp/locks.py
vendored
Normal file
@@ -0,0 +1,45 @@
import asyncio
import collections
from typing import Any, Optional

try:
    from typing import Deque
except ImportError:
    from typing_extensions import Deque


class EventResultOrError:
    """Wraps an asyncio.Event so that the waiting tasks can either be
    woken normally, without any error, or have an exception raised
    in them.

    Thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc = None  # type: Optional[BaseException]
        self._event = asyncio.Event()
        self._waiters = collections.deque()  # type: Deque[asyncio.Future[Any]]

    def set(self, exc: Optional[BaseException] = None) -> None:
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        waiter = self._loop.create_task(self._event.wait())
        self._waiters.append(waiter)
        try:
            val = await waiter
        finally:
            self._waiters.remove(waiter)

        if self._exc is not None:
            raise self._exc

        return val

    def cancel(self) -> None:
        """Cancel all waiters."""
        for waiter in self._waiters:
            waiter.cancel()
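# --- Editorial illustration (not part of the vendored aiohttp file). A
# minimal sketch of EventResultOrError: one task blocks in wait() until
# another calls set(); passing an exception to set() makes every waiter
# raise it instead.
async def _demo_event_result_or_error() -> None:
    loop = asyncio.get_running_loop()
    event = EventResultOrError(loop)
    waiter = loop.create_task(event.wait())
    await asyncio.sleep(0)  # let the waiter start
    event.set()  # or: event.set(RuntimeError("boom")) to raise in waiters
    await waiter


# run with: asyncio.run(_demo_event_result_or_error())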
8
dist/ba_data/python-site-packages/aiohttp/log.py
vendored
Normal file
@@ -0,0 +1,8 @@
import logging

access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
957
dist/ba_data/python-site-packages/aiohttp/multipart.py
vendored
Normal file
@@ -0,0 +1,957 @@
import base64
import binascii
import json
import re
import uuid
import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncIterator,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
)
from urllib.parse import parse_qsl, unquote, urlencode

from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping

from .hdrs import (
    CONTENT_DISPOSITION,
    CONTENT_ENCODING,
    CONTENT_LENGTH,
    CONTENT_TRANSFER_ENCODING,
    CONTENT_TYPE,
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .http import HeadersParser
from .payload import (
    JsonPayload,
    LookupError,
    Order,
    Payload,
    StringPayload,
    get_payload,
    payload_type,
)
from .streams import StreamReader

__all__ = (
    "MultipartReader",
    "MultipartWriter",
    "BodyPartReader",
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "parse_content_disposition",
    "content_disposition_filename",
)


if TYPE_CHECKING:  # pragma: no cover
    from .client_reqrep import ClientResponse


class BadContentDispositionHeader(RuntimeWarning):
    pass


class BadContentDispositionParam(RuntimeWarning):
    pass


def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    def is_token(string: str) -> bool:
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params = {}  # type: Dict[str, str]
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename; in any case this is just
                # a one-case fix -- a proper fix needs a parser redesign
                _value = "{};{}".format(value, parts[0])
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params


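# --- Editorial illustration (not part of the vendored aiohttp file): a
# typical Content-Disposition header split into its type and parameters by
# the function above.
def _demo_parse_content_disposition() -> None:
    disptype, params = parse_content_disposition(
        'form-data; name="field1"; filename="report.pdf"'
    )
    assert disptype == "form-data"
    assert params == {"name": "field1", "filename": "report.pdf"}

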
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    name_suf = "%s*" % name
    if not params:
        return None
    elif name_suf in params:
        return params[name_suf]
    elif name in params:
        return params[name]
    else:
        parts = []
        fnparams = sorted(
            (key, value) for key, value in params.items() if key.startswith(name_suf)
        )
        for num, (key, value) in enumerate(fnparams):
            _, tail = key.split("*", 1)
            if tail.endswith("*"):
                tail = tail[:-1]
            if tail == str(num):
                parts.append(value)
            else:
                break
        if not parts:
            return None
        value = "".join(parts)
        if "'" in value:
            encoding, _, value = value.split("'", 2)
            encoding = encoding or "utf-8"
            return unquote(value, encoding, "strict")
        return value


class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    It takes care of the underlying connection
    and closes it when needed.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    def at_eof(self) -> bool:
        """Returns True when all response data has been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits the next multipart reader object."""
        item = await self.stream.next()
        if self.stream.at_eof():
            await self.release()
        return item

    async def release(self) -> None:
        """Releases the connection gracefully, reading all the content
        to the void."""
        await self.resp.release()


class BodyPartReader:
    """Multipart reader for single body part."""

    chunk_size = 8192

    def __init__(
        self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._content = content
        self._at_eof = False
        length = self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        # TODO: typing.Deque is not supported by Python 3.5
        self._unread = deque()  # type: Any
        self._prev_chunk = None  # type: Optional[bytes]
        self._content_eof = 0
        self._cache = {}  # type: Dict[str, Any]

    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: decodes the data using the encoding from the
        Content-Encoding header. If that header is missing,
        the data remains untouched.
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads a body part content chunk of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads a body part content chunk of the specified size.
        # The body part must have a Content-Length header with a proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads a content chunk of a body part with unknown length.
        # The Content-Length header for the body part is not necessary.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater than or equal to boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = await self._content.read(size)
        self._content_eof += int(self._content.at_eof())
        assert self._content_eof < 3, "Reading after EOF"
        assert self._prev_chunk is not None
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
if idx >= 0:
|
||||
# pushing boundary back to content
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings("ignore", category=DeprecationWarning)
|
||||
self._content.unread_data(window[idx:])
|
||||
if size > idx:
|
||||
self._prev_chunk = self._prev_chunk[:idx]
|
||||
chunk = window[len(self._prev_chunk) : idx]
|
||||
if not chunk:
|
||||
self._at_eof = True
|
||||
result = self._prev_chunk
|
||||
self._prev_chunk = chunk
|
||||
return result
|
||||
|
||||
async def readline(self) -> bytes:
|
||||
"""Reads body part by line by line."""
|
||||
if self._at_eof:
|
||||
return b""
|
||||
|
||||
if self._unread:
|
||||
line = self._unread.popleft()
|
||||
else:
|
||||
line = await self._content.readline()
|
||||
|
||||
if line.startswith(self._boundary):
|
||||
# the very last boundary may not come with \r\n,
|
||||
# so set single rules for everyone
|
||||
sline = line.rstrip(b"\r\n")
|
||||
boundary = self._boundary
|
||||
last_boundary = self._boundary + b"--"
|
||||
# ensure that we read exactly the boundary, not something alike
|
||||
if sline == boundary or sline == last_boundary:
|
||||
self._at_eof = True
|
||||
self._unread.append(line)
|
||||
return b""
|
||||
else:
|
||||
next_line = await self._content.readline()
|
||||
if next_line.startswith(self._boundary):
|
||||
line = line[:-2] # strip CRLF but only once
|
||||
self._unread.append(next_line)
|
||||
|
||||
return line
|
||||
|
||||
async def release(self) -> None:
|
||||
"""Like read(), but reads all the data to the void."""
|
||||
if self._at_eof:
|
||||
return
|
||||
while not self._at_eof:
|
||||
await self.read_chunk(self.chunk_size)
|
||||
|
||||
async def text(self, *, encoding: Optional[str] = None) -> str:
|
||||
"""Like read(), but assumes that body part contains text data."""
|
||||
data = await self.read(decode=True)
|
||||
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
|
||||
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
|
||||
encoding = encoding or self.get_charset(default="utf-8")
|
||||
return data.decode(encoding)
|
||||
|
||||
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Like read(), but assumes that body parts contains JSON data."""
|
||||
data = await self.read(decode=True)
|
||||
if not data:
|
||||
return None
|
||||
encoding = encoding or self.get_charset(default="utf-8")
|
||||
return json.loads(data.decode(encoding))
|
||||
|
||||
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
|
||||
"""Like read(), but assumes that body parts contains form
|
||||
urlencoded data.
|
||||
"""
|
||||
data = await self.read(decode=True)
|
||||
if not data:
|
||||
return []
|
||||
if encoding is not None:
|
||||
real_encoding = encoding
|
||||
else:
|
||||
real_encoding = self.get_charset(default="utf-8")
|
||||
return parse_qsl(
|
||||
data.rstrip().decode(real_encoding),
|
||||
keep_blank_values=True,
|
||||
encoding=real_encoding,
|
||||
)
|
||||
|
||||
def at_eof(self) -> bool:
|
||||
"""Returns True if the boundary was reached or False otherwise."""
|
||||
return self._at_eof
|
||||
|
||||
def decode(self, data: bytes) -> bytes:
|
||||
"""Decodes data according the specified Content-Encoding
|
||||
or Content-Transfer-Encoding headers value.
|
||||
"""
|
||||
if CONTENT_TRANSFER_ENCODING in self.headers:
|
||||
data = self._decode_content_transfer(data)
|
||||
if CONTENT_ENCODING in self.headers:
|
||||
return self._decode_content(data)
|
||||
return data
|
||||
|
||||
def _decode_content(self, data: bytes) -> bytes:
|
||||
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
|
||||
|
||||
if encoding == "deflate":
|
||||
return zlib.decompress(data, -zlib.MAX_WBITS)
|
||||
elif encoding == "gzip":
|
||||
return zlib.decompress(data, 16 + zlib.MAX_WBITS)
|
||||
elif encoding == "identity":
|
||||
return data
|
||||
else:
|
||||
raise RuntimeError(f"unknown content encoding: {encoding}")
|
||||
|
||||
def _decode_content_transfer(self, data: bytes) -> bytes:
|
||||
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
|
||||
|
||||
if encoding == "base64":
|
||||
return base64.b64decode(data)
|
||||
elif encoding == "quoted-printable":
|
||||
return binascii.a2b_qp(data)
|
||||
elif encoding in ("binary", "8bit", "7bit"):
|
||||
return data
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"unknown content transfer encoding: {}" "".format(encoding)
|
||||
)
|
||||
|
||||
def get_charset(self, default: str) -> str:
|
||||
"""Returns charset parameter from Content-Type header or default."""
|
||||
ctype = self.headers.get(CONTENT_TYPE, "")
|
||||
mimetype = parse_mimetype(ctype)
|
||||
return mimetype.parameters.get("charset", default)
|
||||
|
||||
@reify
|
||||
def name(self) -> Optional[str]:
|
||||
"""Returns name specified in Content-Disposition header or None
|
||||
if missed or header is malformed.
|
||||
"""
|
||||
|
||||
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
|
||||
return content_disposition_filename(params, "name")
|
||||
|
||||
@reify
|
||||
def filename(self) -> Optional[str]:
|
||||
"""Returns filename specified in Content-Disposition header or None
|
||||
if missed or header is malformed.
|
||||
"""
|
||||
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
|
||||
return content_disposition_filename(params, "filename")
|
||||
|
||||
|
||||
@payload_type(BodyPartReader, order=Order.try_first)
|
||||
class BodyPartReaderPayload(Payload):
|
||||
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(value, *args, **kwargs)
|
||||
|
||||
params = {} # type: Dict[str, str]
|
||||
if value.name is not None:
|
||||
params["name"] = value.name
|
||||
if value.filename is not None:
|
||||
params["filename"] = value.filename
|
||||
|
||||
if params:
|
||||
self.set_content_disposition("attachment", True, **params)
|
||||
|
||||
async def write(self, writer: Any) -> None:
|
||||
field = self._value
|
||||
chunk = await field.read_chunk(size=2 ** 16)
|
||||
while chunk:
|
||||
await writer.write(field.decode(chunk))
|
||||
chunk = await field.read_chunk(size=2 ** 16)
|
||||
|
||||
|
||||
class MultipartReader:
|
||||
"""Multipart body reader."""
|
||||
|
||||
#: Response wrapper, used when multipart readers constructs from response.
|
||||
response_wrapper_cls = MultipartResponseWrapper
|
||||
#: Multipart reader class, used to handle multipart/* body parts.
|
||||
#: None points to type(self)
|
||||
multipart_reader_cls = None
|
||||
#: Body part reader class for non multipart/* content types.
|
||||
part_reader_cls = BodyPartReader
|
||||
|
||||
def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
|
||||
self.headers = headers
|
||||
self._boundary = ("--" + self._get_boundary()).encode()
|
||||
self._content = content
|
||||
self._last_part = (
|
||||
None
|
||||
) # type: Optional[Union['MultipartReader', BodyPartReader]]
|
||||
self._at_eof = False
|
||||
self._at_bof = True
|
||||
self._unread = [] # type: List[bytes]
|
||||
|
||||
def __aiter__(
|
||||
self,
|
||||
) -> AsyncIterator["BodyPartReader"]:
|
||||
return self # type: ignore
|
||||
|
||||
async def __anext__(
|
||||
self,
|
||||
) -> Optional[Union["MultipartReader", BodyPartReader]]:
|
||||
part = await self.next()
|
||||
if part is None:
|
||||
raise StopAsyncIteration
|
||||
return part
|
||||
|
||||
@classmethod
|
||||
def from_response(
|
||||
cls,
|
||||
response: "ClientResponse",
|
||||
) -> MultipartResponseWrapper:
|
||||
"""Constructs reader instance from HTTP response.
|
||||
|
||||
:param response: :class:`~aiohttp.client.ClientResponse` instance
|
||||
"""
|
||||
obj = cls.response_wrapper_cls(
|
||||
response, cls(response.headers, response.content)
|
||||
)
|
||||
return obj
|
||||
|
||||
def at_eof(self) -> bool:
|
||||
"""Returns True if the final boundary was reached or
|
||||
False otherwise.
|
||||
"""
|
||||
return self._at_eof
|
||||
|
||||
async def next(
|
||||
self,
|
||||
) -> Optional[Union["MultipartReader", BodyPartReader]]:
|
||||
"""Emits the next multipart body part."""
|
||||
# So, if we're at BOF, we need to skip till the boundary.
|
||||
if self._at_eof:
|
||||
return None
|
||||
await self._maybe_release_last_part()
|
||||
if self._at_bof:
|
||||
await self._read_until_first_boundary()
|
||||
self._at_bof = False
|
||||
else:
|
||||
await self._read_boundary()
|
||||
if self._at_eof: # we just read the last boundary, nothing to do there
|
||||
return None
|
||||
self._last_part = await self.fetch_next_part()
|
||||
return self._last_part
|
||||
|
||||
async def release(self) -> None:
|
||||
"""Reads all the body parts to the void till the final boundary."""
|
||||
while not self._at_eof:
|
||||
item = await self.next()
|
||||
if item is None:
|
||||
break
|
||||
await item.release()
|
||||
|
||||
async def fetch_next_part(
|
||||
self,
|
||||
) -> Union["MultipartReader", BodyPartReader]:
|
||||
"""Returns the next body part reader."""
|
||||
headers = await self._read_headers()
|
||||
return self._get_part_reader(headers)
|
||||
|
||||
def _get_part_reader(
|
||||
self,
|
||||
headers: "CIMultiDictProxy[str]",
|
||||
) -> Union["MultipartReader", BodyPartReader]:
|
||||
"""Dispatches the response by the `Content-Type` header, returning
|
||||
suitable reader instance.
|
||||
|
||||
:param dict headers: Response headers
|
||||
"""
|
||||
ctype = headers.get(CONTENT_TYPE, "")
|
||||
mimetype = parse_mimetype(ctype)
|
||||
|
||||
if mimetype.type == "multipart":
|
||||
if self.multipart_reader_cls is None:
|
||||
return type(self)(headers, self._content)
|
||||
return self.multipart_reader_cls(headers, self._content)
|
||||
else:
|
||||
return self.part_reader_cls(self._boundary, headers, self._content)
|
||||
|
||||
def _get_boundary(self) -> str:
|
||||
mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
|
||||
|
||||
assert mimetype.type == "multipart", "multipart/* content type expected"
|
||||
|
||||
if "boundary" not in mimetype.parameters:
|
||||
raise ValueError(
|
||||
"boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
|
||||
)
|
||||
|
||||
boundary = mimetype.parameters["boundary"]
|
||||
if len(boundary) > 70:
|
||||
raise ValueError("boundary %r is too long (70 chars max)" % boundary)
|
||||
|
||||
return boundary
|
||||
|
||||
async def _readline(self) -> bytes:
|
||||
if self._unread:
|
||||
return self._unread.pop()
|
||||
return await self._content.readline()
|
||||
|
||||
async def _read_until_first_boundary(self) -> None:
|
||||
while True:
|
||||
chunk = await self._readline()
|
||||
if chunk == b"":
|
||||
raise ValueError(
|
||||
"Could not find starting boundary %r" % (self._boundary)
|
||||
)
|
||||
chunk = chunk.rstrip()
|
||||
if chunk == self._boundary:
|
||||
return
|
||||
elif chunk == self._boundary + b"--":
|
||||
self._at_eof = True
|
||||
return
|
||||
|
||||
async def _read_boundary(self) -> None:
|
||||
chunk = (await self._readline()).rstrip()
|
||||
if chunk == self._boundary:
|
||||
pass
|
||||
elif chunk == self._boundary + b"--":
|
||||
self._at_eof = True
|
||||
epilogue = await self._readline()
|
||||
next_line = await self._readline()
|
||||
|
||||
# the epilogue is expected and then either the end of input or the
|
||||
# parent multipart boundary, if the parent boundary is found then
|
||||
# it should be marked as unread and handed to the parent for
|
||||
# processing
|
||||
if next_line[:2] == b"--":
|
||||
self._unread.append(next_line)
|
||||
# otherwise the request is likely missing an epilogue and both
|
||||
# lines should be passed to the parent for processing
|
||||
# (this handles the old behavior gracefully)
|
||||
else:
|
||||
self._unread.extend([next_line, epilogue])
|
||||
else:
|
||||
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
|
||||
|
||||
async def _read_headers(self) -> "CIMultiDictProxy[str]":
|
||||
lines = [b""]
|
||||
while True:
|
||||
chunk = await self._content.readline()
|
||||
chunk = chunk.strip()
|
||||
lines.append(chunk)
|
||||
if not chunk:
|
||||
break
|
||||
parser = HeadersParser()
|
||||
headers, raw_headers = parser.parse_headers(lines)
|
||||
return headers
|
||||
|
||||
async def _maybe_release_last_part(self) -> None:
|
||||
"""Ensures that the last read body part is read completely."""
|
||||
if self._last_part is not None:
|
||||
if not self._last_part.at_eof():
|
||||
await self._last_part.release()
|
||||
self._unread.extend(self._last_part._unread)
|
||||
self._last_part = None
|
||||
|
||||
|
||||
_Part = Tuple[Payload, str, str]
|
||||
|
||||
|
||||
class MultipartWriter(Payload):
|
||||
"""Multipart body writer."""
|
||||
|
||||
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
|
||||
boundary = boundary if boundary is not None else uuid.uuid4().hex
|
||||
# The underlying Payload API demands a str (utf-8), not bytes,
|
||||
# so we need to ensure we don't lose anything during conversion.
|
||||
# As a result, require the boundary to be ASCII only.
|
||||
# In both situations.
|
||||
|
||||
try:
|
||||
self._boundary = boundary.encode("ascii")
|
||||
except UnicodeEncodeError:
|
||||
raise ValueError("boundary should contain ASCII only chars") from None
|
||||
ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
|
||||
|
||||
super().__init__(None, content_type=ctype)
|
||||
|
||||
self._parts = [] # type: List[_Part]
|
||||
|
||||
def __enter__(self) -> "MultipartWriter":
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_val: Optional[BaseException],
|
||||
exc_tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def __iter__(self) -> Iterator[_Part]:
|
||||
return iter(self._parts)
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._parts)
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return True
|
||||
|
||||
_valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
|
||||
_invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
|
||||
|
||||
@property
|
||||
def _boundary_value(self) -> str:
|
||||
"""Wrap boundary parameter value in quotes, if necessary.
|
||||
|
||||
Reads self.boundary and returns a unicode sting.
|
||||
"""
|
||||
# Refer to RFCs 7231, 7230, 5234.
|
||||
#
|
||||
# parameter = token "=" ( token / quoted-string )
|
||||
# token = 1*tchar
|
||||
# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
|
||||
# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
|
||||
# obs-text = %x80-FF
|
||||
# quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
|
||||
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
|
||||
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
|
||||
# / DIGIT / ALPHA
|
||||
# ; any VCHAR, except delimiters
|
||||
# VCHAR = %x21-7E
|
||||
value = self._boundary
|
||||
if re.match(self._valid_tchar_regex, value):
|
||||
return value.decode("ascii") # cannot fail
|
||||
|
||||
if re.search(self._invalid_qdtext_char_regex, value):
|
||||
raise ValueError("boundary value contains invalid characters")
|
||||
|
||||
# escape %x5C and %x22
|
||||
quoted_value_content = value.replace(b"\\", b"\\\\")
|
||||
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
|
||||
|
||||
return '"' + quoted_value_content.decode("ascii") + '"'
|
||||
|
||||
@property
|
||||
def boundary(self) -> str:
|
||||
return self._boundary.decode("ascii")
|
||||
|
||||
def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
|
||||
if headers is None:
|
||||
headers = CIMultiDict()
|
||||
|
||||
if isinstance(obj, Payload):
|
||||
obj.headers.update(headers)
|
||||
return self.append_payload(obj)
|
||||
else:
|
||||
try:
|
||||
payload = get_payload(obj, headers=headers)
|
||||
except LookupError:
|
||||
raise TypeError("Cannot create payload from %r" % obj)
|
||||
else:
|
||||
return self.append_payload(payload)
|
||||
|
||||
def append_payload(self, payload: Payload) -> Payload:
|
||||
"""Adds a new body part to multipart writer."""
|
||||
# compression
|
||||
encoding = payload.headers.get(
|
||||
CONTENT_ENCODING,
|
||||
"",
|
||||
).lower() # type: Optional[str]
|
||||
if encoding and encoding not in ("deflate", "gzip", "identity"):
|
||||
raise RuntimeError(f"unknown content encoding: {encoding}")
|
||||
if encoding == "identity":
|
||||
encoding = None
|
||||
|
||||
# te encoding
|
||||
te_encoding = payload.headers.get(
|
||||
CONTENT_TRANSFER_ENCODING,
|
||||
"",
|
||||
).lower() # type: Optional[str]
|
||||
if te_encoding not in ("", "base64", "quoted-printable", "binary"):
|
||||
raise RuntimeError(
|
||||
"unknown content transfer encoding: {}" "".format(te_encoding)
|
||||
)
|
||||
if te_encoding == "binary":
|
||||
te_encoding = None
|
||||
|
||||
# size
|
||||
size = payload.size
|
||||
if size is not None and not (encoding or te_encoding):
|
||||
payload.headers[CONTENT_LENGTH] = str(size)
|
||||
|
||||
self._parts.append((payload, encoding, te_encoding)) # type: ignore
|
||||
return payload
|
||||
|
||||
def append_json(
|
||||
self, obj: Any, headers: Optional[MultiMapping[str]] = None
|
||||
) -> Payload:
|
||||
"""Helper to append JSON part."""
|
||||
if headers is None:
|
||||
headers = CIMultiDict()
|
||||
|
||||
return self.append_payload(JsonPayload(obj, headers=headers))
|
||||
|
||||
def append_form(
|
||||
self,
|
||||
obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
|
||||
headers: Optional[MultiMapping[str]] = None,
|
||||
) -> Payload:
|
||||
"""Helper to append form urlencoded part."""
|
||||
assert isinstance(obj, (Sequence, Mapping))
|
||||
|
||||
if headers is None:
|
||||
headers = CIMultiDict()
|
||||
|
||||
if isinstance(obj, Mapping):
|
||||
obj = list(obj.items())
|
||||
data = urlencode(obj, doseq=True)
|
||||
|
||||
return self.append_payload(
|
||||
StringPayload(
|
||||
data, headers=headers, content_type="application/x-www-form-urlencoded"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def size(self) -> Optional[int]:
|
||||
"""Size of the payload."""
|
||||
total = 0
|
||||
for part, encoding, te_encoding in self._parts:
|
||||
if encoding or te_encoding or part.size is None:
|
||||
return None
|
||||
|
||||
total += int(
|
||||
2
|
||||
+ len(self._boundary)
|
||||
+ 2
|
||||
+ part.size # b'--'+self._boundary+b'\r\n'
|
||||
+ len(part._binary_headers)
|
||||
+ 2 # b'\r\n'
|
||||
)
|
||||
|
||||
total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
|
||||
return total
|
||||
|
||||
async def write(self, writer: Any, close_boundary: bool = True) -> None:
|
||||
"""Write body."""
|
||||
for part, encoding, te_encoding in self._parts:
|
||||
await writer.write(b"--" + self._boundary + b"\r\n")
|
||||
await writer.write(part._binary_headers)
|
||||
|
||||
if encoding or te_encoding:
|
||||
w = MultipartPayloadWriter(writer)
|
||||
if encoding:
|
||||
w.enable_compression(encoding)
|
||||
if te_encoding:
|
||||
w.enable_encoding(te_encoding)
|
||||
await part.write(w) # type: ignore
|
||||
await w.write_eof()
|
||||
else:
|
||||
await part.write(writer)
|
||||
|
||||
await writer.write(b"\r\n")
|
||||
|
||||
if close_boundary:
|
||||
await writer.write(b"--" + self._boundary + b"--\r\n")
|
||||
|
||||
|
||||
class MultipartPayloadWriter:
|
||||
def __init__(self, writer: Any) -> None:
|
||||
self._writer = writer
|
||||
self._encoding = None # type: Optional[str]
|
||||
self._compress = None # type: Any
|
||||
self._encoding_buffer = None # type: Optional[bytearray]
|
||||
|
||||
def enable_encoding(self, encoding: str) -> None:
|
||||
if encoding == "base64":
|
||||
self._encoding = encoding
|
||||
self._encoding_buffer = bytearray()
|
||||
elif encoding == "quoted-printable":
|
||||
self._encoding = "quoted-printable"
|
||||
|
||||
def enable_compression(self, encoding: str = "deflate") -> None:
|
||||
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
|
||||
self._compress = zlib.compressobj(wbits=zlib_mode)
|
||||
|
||||
async def write_eof(self) -> None:
|
||||
if self._compress is not None:
|
||||
chunk = self._compress.flush()
|
||||
if chunk:
|
||||
self._compress = None
|
||||
await self.write(chunk)
|
||||
|
||||
if self._encoding == "base64":
|
||||
if self._encoding_buffer:
|
||||
await self._writer.write(base64.b64encode(self._encoding_buffer))
|
||||
|
||||
async def write(self, chunk: bytes) -> None:
|
||||
if self._compress is not None:
|
||||
if chunk:
|
||||
chunk = self._compress.compress(chunk)
|
||||
if not chunk:
|
||||
return
|
||||
|
||||
if self._encoding == "base64":
|
||||
buf = self._encoding_buffer
|
||||
assert buf is not None
|
||||
buf.extend(chunk)
|
||||
|
||||
if buf:
|
||||
div, mod = divmod(len(buf), 3)
|
||||
enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
|
||||
if enc_chunk:
|
||||
b64chunk = base64.b64encode(enc_chunk)
|
||||
await self._writer.write(b64chunk)
|
||||
elif self._encoding == "quoted-printable":
|
||||
await self._writer.write(binascii.b2a_qp(chunk))
|
||||
else:
|
||||
await self._writer.write(chunk)
|
||||
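For orientation, a minimal sketch of how the module above is typically used: MultipartWriter assembles parts for a request body, while MultipartReader.from_response consumes a multipart client response. The endpoint URL and field names here are placeholders, not part of the vendored code.

import aiohttp

async def upload_and_read(session: aiohttp.ClientSession) -> None:
    # Build a multipart/form-data body; each append() dispatches through
    # the payload registry shown in payload.py below.
    with aiohttp.MultipartWriter("form-data") as mpwriter:
        part = mpwriter.append("hello")  # becomes a StringPayload
        part.set_content_disposition("form-data", name="greeting")
        mpwriter.append_json({"answer": 42})

    async with session.post("http://example.invalid/upload", data=mpwriter) as resp:
        # Reading a multipart response: iterate over BodyPartReader objects.
        reader = aiohttp.MultipartReader.from_response(resp)
        async for body_part in reader:
            print(await body_part.text())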
448
dist/ba_data/python-site-packages/aiohttp/payload.py
vendored
Normal file
@@ -0,0 +1,448 @@
import asyncio
import enum
import io
import json
import mimetypes
import os
import warnings
from abc import ABC, abstractmethod
from itertools import chain
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    ByteString,
    Dict,
    Iterable,
    Optional,
    Text,
    TextIO,
    Tuple,
    Type,
    Union,
)

from multidict import CIMultiDict

from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
    PY_36,
    content_disposition_header,
    guess_filename,
    parse_mimetype,
    sentinel,
)
from .streams import StreamReader
from .typedefs import JSONEncoder, _CIMultiDict

__all__ = (
    "PAYLOAD_REGISTRY",
    "get_payload",
    "payload_type",
    "Payload",
    "BytesPayload",
    "StringPayload",
    "IOBasePayload",
    "BytesIOPayload",
    "BufferedReaderPayload",
    "TextIOPayload",
    "StringIOPayload",
    "JsonPayload",
    "AsyncIterablePayload",
)

TOO_LARGE_BYTES_BODY = 2 ** 20  # 1 MB


if TYPE_CHECKING:  # pragma: no cover
    from typing import List


class LookupError(Exception):
    pass


class Order(str, enum.Enum):
    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"


def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)


def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    PAYLOAD_REGISTRY.register(factory, type, order=order)


class payload_type:
    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        register_payload(factory, self.type, order=self.order)
        return factory


class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self) -> None:
        self._first = []  # type: List[Tuple[Type[Payload], Any]]
        self._normal = []  # type: List[Tuple[Type[Payload], Any]]
        self._last = []  # type: List[Tuple[Type[Payload], Any]]

    def get(
        self, data: Any, *args: Any, _CHAIN: Any = chain, **kwargs: Any
    ) -> "Payload":
        if isinstance(data, Payload):
            return data
        for factory, type in _CHAIN(self._first, self._normal, self._last):
            if isinstance(data, type):
                return factory(data, *args, **kwargs)

        raise LookupError()

    def register(
        self, factory: Type["Payload"], type: Any, *, order: Order = Order.normal
    ) -> None:
        if order is Order.try_first:
            self._first.append((factory, type))
        elif order is Order.normal:
            self._normal.append((factory, type))
        elif order is Order.try_last:
            self._last.append((factory, type))
        else:
            raise ValueError(f"Unsupported order {order!r}")


class Payload(ABC):

    _default_content_type = "application/octet-stream"  # type: str
    _size = None  # type: Optional[int]

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Optional[str] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers = CIMultiDict()  # type: _CIMultiDict
        self._value = value
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            content_type = mimetypes.guess_type(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self, disptype: str, quote_fields: bool = True, **params: Any
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, **params
        )

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """


class BytesPayload(Payload):
    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(
                "value argument must be byte-ish, not {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        else:
            self._size = len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            if PY_36:
                kwargs = {"source": self}
            else:
                kwargs = {}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **kwargs,
            )

    async def write(self, writer: AbstractStreamWriter) -> None:
        await writer.write(self._value)


class StringPayload(BytesPayload):
    def __init__(
        self,
        value: Text,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        if encoding is None:
            if content_type is None:
                real_encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
            real_encoding = encoding

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )


class StringIOPayload(StringPayload):
    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        super().__init__(value.read(), *args, **kwargs)


class IOBasePayload(Payload):
    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
        finally:
            await loop.run_in_executor(None, self._value.close)


class TextIOPayload(IOBasePayload):
    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
            while chunk:
                await writer.write(chunk.encode(self._encoding))
                chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
        finally:
            await loop.run_in_executor(None, self._value.close)


class BytesIOPayload(IOBasePayload):
    @property
    def size(self) -> int:
        position = self._value.tell()
        end = self._value.seek(0, os.SEEK_END)
        self._value.seek(position)
        return end - position


class BufferedReaderPayload(IOBasePayload):
    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            return None


class JsonPayload(BytesPayload):
    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:

        super().__init__(
            dumps(value).encode(encoding),
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )


if TYPE_CHECKING:  # pragma: no cover
    from typing import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable


class AsyncIterablePayload(Payload):

    _iter = None  # type: Optional[_AsyncIterator]

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        if self._iter:
            try:
                # iter is not None check prevents rare cases
                # when the same iterable is used twice
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None


class StreamReaderPayload(AsyncIterablePayload):
    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value.iter_any(), *args, **kwargs)


PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives a chance for more specialized async iterables like
# BodyPartReaderPayload to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
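A minimal sketch of the registry dispatch just defined: get_payload() walks _first, then _normal, then _last, returning the first factory whose registered type matches via isinstance(). The output comments assume the registration order above.

import io

from aiohttp.payload import get_payload

p1 = get_payload(b"raw bytes")          # -> BytesPayload
p2 = get_payload("text")                # -> StringPayload (utf-8 by default)
p3 = get_payload(io.BytesIO(b"data"))   # -> BytesIOPayload

print(p1.size)          # 9
print(p2.content_type)  # text/plain; charset=utf-8
print(p3.size)          # 4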
74
dist/ba_data/python-site-packages/aiohttp/payload_streamer.py
vendored
Normal file
@@ -0,0 +1,74 @@
""" Payload implemenation for coroutines as data provider.
|
||||
|
||||
As a simple case, you can upload data from file::
|
||||
|
||||
@aiohttp.streamer
|
||||
async def file_sender(writer, file_name=None):
|
||||
with open(file_name, 'rb') as f:
|
||||
chunk = f.read(2**16)
|
||||
while chunk:
|
||||
await writer.write(chunk)
|
||||
|
||||
chunk = f.read(2**16)
|
||||
|
||||
Then you can use `file_sender` like this:
|
||||
|
||||
async with session.post('http://httpbin.org/post',
|
||||
data=file_sender(file_name='huge_file')) as resp:
|
||||
print(await resp.text())
|
||||
|
||||
..note:: Coroutine must accept `writer` as first argument
|
||||
|
||||
"""
|
||||
|
||||
import types
|
||||
import warnings
|
||||
from typing import Any, Awaitable, Callable, Dict, Tuple
|
||||
|
||||
from .abc import AbstractStreamWriter
|
||||
from .payload import Payload, payload_type
|
||||
|
||||
__all__ = ("streamer",)
|
||||
|
||||
|
||||
class _stream_wrapper:
|
||||
def __init__(
|
||||
self,
|
||||
coro: Callable[..., Awaitable[None]],
|
||||
args: Tuple[Any, ...],
|
||||
kwargs: Dict[str, Any],
|
||||
) -> None:
|
||||
self.coro = types.coroutine(coro)
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
async def __call__(self, writer: AbstractStreamWriter) -> None:
|
||||
await self.coro(writer, *self.args, **self.kwargs) # type: ignore
|
||||
|
||||
|
||||
class streamer:
|
||||
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
|
||||
warnings.warn(
|
||||
"@streamer is deprecated, use async generators instead",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
self.coro = coro
|
||||
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
|
||||
return _stream_wrapper(self.coro, args, kwargs)
|
||||
|
||||
|
||||
@payload_type(_stream_wrapper)
|
||||
class StreamWrapperPayload(Payload):
|
||||
async def write(self, writer: AbstractStreamWriter) -> None:
|
||||
await self._value(writer)
|
||||
|
||||
|
||||
@payload_type(streamer)
|
||||
class StreamPayload(StreamWrapperPayload):
|
||||
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(value(), *args, **kwargs)
|
||||
|
||||
async def write(self, writer: AbstractStreamWriter) -> None:
|
||||
await self._value(writer)
|
||||
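Since the module above deprecates @streamer in favor of async generators, a sketch of the suggested replacement may help: a plain async generator is picked up by AsyncIterablePayload from payload.py, no decorator needed. The URL and file name are taken from the docstring example.

import aiohttp

async def file_sender(file_name: str):
    # Modern replacement for the @streamer-decorated coroutine above.
    with open(file_name, "rb") as f:
        chunk = f.read(2 ** 16)
        while chunk:
            yield chunk
            chunk = f.read(2 ** 16)

async def upload(session: aiohttp.ClientSession) -> None:
    async with session.post(
        "http://httpbin.org/post", data=file_sender("huge_file")
    ) as resp:
        print(await resp.text())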
1
dist/ba_data/python-site-packages/aiohttp/py.typed
vendored
Normal file
@@ -0,0 +1 @@
Marker
380
dist/ba_data/python-site-packages/aiohttp/pytest_plugin.py
vendored
Normal file
@@ -0,0 +1,380 @@
import asyncio
import contextlib
import warnings
from collections.abc import Callable

import pytest

from aiohttp.helpers import PY_37, isasyncgenfunction
from aiohttp.web import Application

from .test_utils import (
    BaseTestServer,
    RawTestServer,
    TestClient,
    TestServer,
    loop_context,
    setup_test_loop,
    teardown_test_loop,
    unused_port as _unused_port,
)

try:
    import uvloop
except ImportError:  # pragma: no cover
    uvloop = None

try:
    import tokio
except ImportError:  # pragma: no cover
    tokio = None


def pytest_addoption(parser):  # type: ignore
    parser.addoption(
        "--aiohttp-fast",
        action="store_true",
        default=False,
        help="run tests faster by disabling extra checks",
    )
    parser.addoption(
        "--aiohttp-loop",
        action="store",
        default="pyloop",
        help="run tests with specific loop: pyloop, uvloop, tokio or all",
    )
    parser.addoption(
        "--aiohttp-enable-loop-debug",
        action="store_true",
        default=False,
        help="enable event loop debug mode",
    )


def pytest_fixture_setup(fixturedef):  # type: ignore
    """
    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending on it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    fixturedef.func = wrapper


@pytest.fixture
def fast(request):  # type: ignore
    """--aiohttp-fast config option"""
    return request.config.getoption("--aiohttp-fast")


@pytest.fixture
def loop_debug(request):  # type: ignore
    """--aiohttp-enable-loop-debug config option"""
    return request.config.getoption("--aiohttp-enable-loop-debug")


@contextlib.contextmanager
def _runtime_warning_context():  # type: ignore
    """
    Context manager which checks for RuntimeWarnings, specifically to
    avoid "coroutine 'X' was never awaited" warnings being missed.

    If RuntimeWarnings occur in the context a RuntimeError is raised.
    """
    with warnings.catch_warnings(record=True) as _warnings:
        yield
        rw = [
            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
            for w in _warnings
            if w.category == RuntimeWarning
        ]
        if rw:
            raise RuntimeError(
                "{} Runtime Warning{},\n{}".format(
                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
                )
            )


@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore
    """
    Sets up and tears down a loop unless one is passed in via the loop
    argument, in which case it is passed straight through.
    """
    if loop:
        # loop already exists, pass it straight through
        yield loop
    else:
        # this shadows loop_context's standard behavior
        loop = setup_test_loop()
        yield loop
        teardown_test_loop(loop, fast=fast)


def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore
    """
    Fix pytest collecting for coroutines.
    """
    if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
        return list(collector._genfunctions(name, obj))


def pytest_pyfunc_call(pyfuncitem):  # type: ignore
    """
    Run coroutines in an event loop instead of a normal function call.
    """
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        return True


def pytest_generate_tests(metafunc):  # type: ignore
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if tokio is not None:  # pragma: no cover
        avail_factories["tokio"] = tokio.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?,tokio?"

    factories = {}  # type: ignore
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )


@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore
    """Return an instance of the event loop."""
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def proactor_loop():  # type: ignore
    if not PY_37:
        policy = asyncio.get_event_loop_policy()
        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore
    else:
        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore
        asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def unused_port(aiohttp_unused_port):  # type: ignore # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port


@pytest.fixture
def aiohttp_unused_port():  # type: ignore
    """Return a port that is unused on the current host."""
    return _unused_port


@pytest.fixture
def aiohttp_server(loop):  # type: ignore
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    servers = []

    async def go(app, *, port=None, **kwargs):  # type: ignore
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize():  # type: ignore
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_server(aiohttp_server):  # type: ignore # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server


@pytest.fixture
def aiohttp_raw_server(loop):  # type: ignore
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    servers = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize():  # type: ignore
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def raw_test_server(aiohttp_raw_server):  # type: ignore # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server


@pytest.fixture
def aiohttp_client(loop):  # type: ignore
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    async def go(__param, *args, server_kwargs=None, **kwargs):  # type: ignore

        if isinstance(__param, Callable) and not isinstance(  # type: ignore
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize():  # type: ignore
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_client(aiohttp_client):  # type: ignore # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
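A minimal sketch of a test that relies on the plugin above: the aiohttp_client fixture wraps an Application in a TestServer and TestClient, and pytest_pyfunc_call runs the coroutine test in the managed loop. The handler and assertions are illustrative.

from aiohttp import web

async def hello(request):
    return web.Response(text="Hello, world")

async def test_hello(aiohttp_client):
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)

    resp = await client.get("/")
    assert resp.status == 200
    assert "Hello, world" in await resp.text()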
149
dist/ba_data/python-site-packages/aiohttp/resolver.py
vendored
Normal file
@@ -0,0 +1,149 @@
import asyncio
|
||||
import socket
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from .abc import AbstractResolver
|
||||
from .helpers import get_running_loop
|
||||
|
||||
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
|
||||
|
||||
try:
|
||||
import aiodns
|
||||
|
||||
# aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
|
||||
except ImportError: # pragma: no cover
|
||||
aiodns = None
|
||||
|
||||
aiodns_default = False
|
||||
|
||||
|
||||
class ThreadedResolver(AbstractResolver):
|
||||
"""Use Executor for synchronous getaddrinfo() calls, which defaults to
|
||||
concurrent.futures.ThreadPoolExecutor.
|
||||
"""
|
||||
|
||||
def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
||||
self._loop = get_running_loop(loop)
|
||||
|
||||
async def resolve(
|
||||
self, hostname: str, port: int = 0, family: int = socket.AF_INET
|
||||
) -> List[Dict[str, Any]]:
|
||||
infos = await self._loop.getaddrinfo(
|
||||
hostname,
|
||||
port,
|
||||
type=socket.SOCK_STREAM,
|
||||
family=family,
|
||||
flags=socket.AI_ADDRCONFIG,
|
||||
)
|
||||
|
||||
hosts = []
|
||||
for family, _, proto, _, address in infos:
|
||||
if family == socket.AF_INET6 and address[3]: # type: ignore
|
||||
# This is essential for link-local IPv6 addresses.
|
||||
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
|
||||
# getnameinfo() unconditionally, but performance makes sense.
|
||||
host, _port = socket.getnameinfo(
|
||||
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
|
||||
)
|
||||
port = int(_port)
|
||||
else:
|
||||
host, port = address[:2]
|
||||
hosts.append(
|
||||
{
|
||||
"hostname": hostname,
|
||||
"host": host,
|
||||
"port": port,
|
||||
"family": family,
|
||||
"proto": proto,
|
||||
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
|
||||
}
|
||||
)
|
||||
|
||||
return hosts
|
||||
|
||||
async def close(self) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class AsyncResolver(AbstractResolver):
|
||||
"""Use the `aiodns` package to make asynchronous DNS lookups"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
||||
*args: Any,
|
||||
**kwargs: Any
|
||||
) -> None:
|
||||
if aiodns is None:
|
||||
raise RuntimeError("Resolver requires aiodns library")
|
||||
|
||||
self._loop = get_running_loop(loop)
|
||||
self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
|
||||
|
||||
if not hasattr(self._resolver, "gethostbyname"):
|
||||
# aiodns 1.1 is not available, fallback to DNSResolver.query
|
||||
self.resolve = self._resolve_with_query # type: ignore
|
||||
|
||||
async def resolve(
|
||||
self, host: str, port: int = 0, family: int = socket.AF_INET
|
||||
) -> List[Dict[str, Any]]:
|
||||
try:
|
||||
resp = await self._resolver.gethostbyname(host, family)
|
||||
except aiodns.error.DNSError as exc:
|
||||
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
|
||||
raise OSError(msg) from exc
|
||||
hosts = []
|
||||
for address in resp.addresses:
|
||||
hosts.append(
|
||||
{
|
||||
"hostname": host,
|
||||
"host": address,
|
||||
"port": port,
|
||||
"family": family,
|
||||
"proto": 0,
|
||||
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
|
||||
}
|
||||
)
|
||||
|
||||
if not hosts:
|
||||
raise OSError("DNS lookup failed")
|
||||
|
||||
return hosts
|
||||
|
||||
async def _resolve_with_query(
|
||||
self, host: str, port: int = 0, family: int = socket.AF_INET
|
||||
) -> List[Dict[str, Any]]:
|
||||
if family == socket.AF_INET6:
|
||||
qtype = "AAAA"
|
||||
else:
|
||||
qtype = "A"
|
||||
|
||||
try:
|
||||
resp = await self._resolver.query(host, qtype)
|
||||
except aiodns.error.DNSError as exc:
|
||||
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
|
||||
raise OSError(msg) from exc
|
||||
|
||||
hosts = []
|
||||
for rr in resp:
|
||||
hosts.append(
|
||||
{
|
||||
"hostname": host,
|
||||
"host": rr.host,
|
||||
"port": port,
|
||||
"family": family,
|
||||
"proto": 0,
|
||||
"flags": socket.AI_NUMERICHOST,
|
||||
}
|
||||
)
|
||||
|
||||
if not hosts:
|
||||
raise OSError("DNS lookup failed")
|
||||
|
||||
return hosts
|
||||
|
||||
async def close(self) -> None:
|
||||
return self._resolver.cancel()
|
||||
|
||||
|
||||
DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver
|
||||
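For orientation, a minimal, hedged sketch of how this resolver API is driven. It assumes the vendored aiohttp package is importable; "example.com" and the port are arbitrary, and the call performs a real DNS lookup. DefaultResolver resolves to ThreadedResolver in practice, since aiodns_default is hard-coded to False above.

import asyncio

from aiohttp.resolver import DefaultResolver


async def main() -> None:
    resolver = DefaultResolver()  # ThreadedResolver: aiodns_default is False
    # resolve() returns one dict per address, with the keys built above:
    # "hostname", "host", "port", "family", "proto", "flags".
    for entry in await resolver.resolve("example.com", port=80):
        print(entry["host"], entry["port"])
    await resolver.close()


asyncio.run(main())

AsyncResolver is only reachable by instantiating it explicitly; flipping aiodns_default would change DefaultResolver for every caller.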
34
dist/ba_data/python-site-packages/aiohttp/signals.py
vendored
Normal file
@ -0,0 +1,34 @@
from aiohttp.frozenlist import FrozenList

__all__ = ("Signal",)


class Signal(FrozenList):
    """Coroutine-based signal implementation.

    To connect a callback to a signal, use any list method.

    Signals are fired using the send() coroutine, which takes named
    arguments.
    """

    __slots__ = ("_owner",)

    def __init__(self, owner):
        super().__init__()
        self._owner = owner

    def __repr__(self):
        return "<Signal owner={}, frozen={}, {!r}>".format(
            self._owner, self.frozen, list(self)
        )

    async def send(self, *args, **kwargs):
        """
        Sends data to all registered receivers.
        """
        if not self.frozen:
            raise RuntimeError("Cannot send non-frozen signal.")

        for receiver in self:
            await receiver(*args, **kwargs)  # type: ignore
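The docstring above describes the whole lifecycle; here is a minimal sketch of it, with an illustrative owner string and handler name (neither is part of aiohttp):

import asyncio

from aiohttp.signals import Signal


async def on_event(value):
    # Receivers are awaited with the arguments passed to send().
    print("received:", value)


async def main() -> None:
    signal = Signal(owner="demo")  # owner only shows up in __repr__
    signal.append(on_event)        # any list method connects a callback
    signal.freeze()                # send() refuses to fire a non-frozen signal
    await signal.send("hello")


asyncio.run(main())

Freezing is what makes the container safe to iterate while firing: once frozen, the receiver list can no longer change.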
12
dist/ba_data/python-site-packages/aiohttp/signals.pyi
vendored
Normal file
@ -0,0 +1,12 @@
from typing import Any, Generic, TypeVar

from aiohttp.frozenlist import FrozenList

__all__ = ("Signal",)

_T = TypeVar("_T")

class Signal(FrozenList[_T], Generic[_T]):
    def __init__(self, owner: Any) -> None: ...
    def __repr__(self) -> str: ...
    async def send(self, *args: Any, **kwargs: Any) -> None: ...
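Because the stub parametrizes Signal over its receiver type, a type checker can flag appends of the wrong callable shape. A small illustrative annotation (the _Receiver alias is not part of aiohttp; the string annotation avoids subscripting the runtime class):

from typing import Awaitable, Callable

from aiohttp.signals import Signal

# Receivers take arbitrary arguments and return an awaitable.
_Receiver = Callable[..., Awaitable[None]]

on_startup: "Signal[_Receiver]" = Signal(owner=None)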
647
dist/ba_data/python-site-packages/aiohttp/streams.py
vendored
Normal file
@ -0,0 +1,647 @@
import asyncio
import collections
import warnings
from typing import Awaitable, Callable, Generic, List, Optional, Tuple, TypeVar

from .base_protocol import BaseProtocol
from .helpers import BaseTimerContext, set_exception, set_result
from .log import internal_logger

try:  # pragma: no cover
    from typing import Deque
except ImportError:
    from typing_extensions import Deque

__all__ = (
    "EMPTY_PAYLOAD",
    "EofStream",
    "StreamReader",
    "DataQueue",
    "FlowControlDataQueue",
)

_T = TypeVar("_T")


class EofStream(Exception):
    """EOF stream indication."""


class AsyncStreamIterator(Generic[_T]):
    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            rv = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if rv == b"":
            raise StopAsyncIteration
        return rv


class ChunkTupleAsyncStreamIterator:
    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        rv = await self._stream.readchunk()
        if rv == (b"", False):
            raise StopAsyncIteration
        return rv


class AsyncStreamReaderMixin:
    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        return AsyncStreamIterator(self.readline)  # type: ignore

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n.

        Available in Python 3.5 and higher only.
        """
        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields all the available
        data as soon as it is received.

        Available in Python 3.5 and higher only.
        """
        return AsyncStreamIterator(self.readany)  # type: ignore

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Returns an asynchronous iterator that yields chunks of data
        as they are received by the server. The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.

        Available in Python 3.5 and higher only.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore


class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None
    ) -> None:
        self._protocol = protocol
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits = None  # type: Optional[List[int]]
        self._buffer = collections.deque()  # type: Deque[bytes]
        self._buffer_offset = 0
        self._eof = False
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._eof_waiter = None  # type: Optional[asyncio.Future[None]]
        self._exception = None  # type: Optional[BaseException]
        self._timer = timer
        self._eof_callbacks = []  # type: List[Callable[[], None]]

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2 ** 16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc)

    def on_eof(self, callback: Callable[[], None]) -> None:
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """Roll back reading some data from the stream, inserting it at the
        buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_http_chunk_receiving without calling "
                "begin_http_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, the current
        # position is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not be possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            if self._timer:
                with self._timer:
                    await waiter
            else:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        line = []
        line_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(b"\n", offset) + 1
                # Read from current offset to found b'\n' or to the end.
                data = self._read_nowait_chunk(ichar - offset if ichar else -1)
                line.append(data)
                line_size += len(data)
                if ichar:
                    not_enough = False

                if line_size > self._high_water:
                    raise ValueError("Line is too long")

            if self._eof:
                break

            if not_enough:
                await self._wait("readline")

        return b"".join(line)

    async def read(self, n: int = -1) -> bytes:
        if self._exception is not None:
            raise self._exception

        # A migration problem: with DataQueue you have to catch the
        # EofStream exception, so the common pattern is to run payload.read()
        # inside an infinite loop. That can become a real infinite loop with
        # StreamReader; let's keep this guard for one more major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk). When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of an HTTP chunk, otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        if self._exception is not None:
            raise self._exception

        blocks = []  # type: List[bytes]
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # The default was changed to be consistent with .read(-1).
        #
        # Most users don't know about this method, so they should not be
        # affected by the change.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or the whole buffer if n == -1."""
        chunks = []

        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
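# --------------------------------------------------------------------------
# Editor's sketch (not part of streams.py): the feed/read cycle StreamReader
# implements. A protocol feeds bytes and EOF; a reader coroutine consumes
# them. Constructing BaseProtocol directly is for illustration only.
#
import asyncio

from aiohttp.base_protocol import BaseProtocol
from aiohttp.streams import StreamReader


async def _demo_stream_reader() -> None:
    loop = asyncio.get_running_loop()
    reader = StreamReader(BaseProtocol(loop), limit=2 ** 16, loop=loop)
    reader.feed_data(b"first line\nsecond")  # wakes any pending _wait()
    reader.feed_eof()                        # unblocks readers at EOF
    assert await reader.readline() == b"first line\n"
    assert await reader.read() == b"second"


asyncio.run(_demo_stream_reader())
# --------------------------------------------------------------------------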
class EmptyStreamReader(AsyncStreamReaderMixin):
    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(self, exc: BaseException) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self) -> bytes:
        return b""


EMPTY_PAYLOAD = EmptyStreamReader()


class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._exception = None  # type: Optional[BaseException]
        self._size = 0
        self._buffer = collections.deque()  # type: Deque[Tuple[_T, int]]

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

    def feed_data(self, data: _T, size: int = 0) -> None:
        self._size += size
        self._buffer.append((data, size))

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise

        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            return data
        else:
            if self._exception is not None:
                raise self._exception
            else:
                raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
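# --------------------------------------------------------------------------
# Editor's sketch (not part of streams.py): DataQueue's single-reader
# contract. read() blocks until feed_data()/feed_eof() resolves the waiter,
# and raises EofStream once the queue is drained after EOF.
#
import asyncio

from aiohttp.streams import DataQueue, EofStream


async def _demo_data_queue() -> None:
    queue: DataQueue[bytes] = DataQueue(asyncio.get_running_loop())
    queue.feed_data(b"payload", size=7)
    queue.feed_eof()
    assert await queue.read() == b"payload"
    try:
        await queue.read()  # empty and at EOF now
    except EofStream:
        print("stream exhausted")


asyncio.run(_demo_data_queue())
# --------------------------------------------------------------------------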
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data."""

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)

        self._protocol = protocol
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        super().feed_data(data, size)

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        try:
            return await super().read()
        finally:
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
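FlowControlDataQueue mirrors StreamReader's watermark logic in queue form: feeding past twice the configured limit pauses the protocol, and draining back below that mark resumes reading. A hedged sketch with a stand-in protocol (DummyProtocol is illustrative; in aiohttp the real counterpart is a transport-backed BaseProtocol):

import asyncio

from aiohttp.streams import FlowControlDataQueue


class DummyProtocol:
    """Records pause/resume calls instead of touching a transport."""

    def __init__(self) -> None:
        self._reading_paused = False

    def pause_reading(self) -> None:
        self._reading_paused = True

    def resume_reading(self) -> None:
        self._reading_paused = False


async def main() -> None:
    proto = DummyProtocol()
    queue = FlowControlDataQueue(proto, limit=4, loop=asyncio.get_running_loop())
    queue.feed_data(b"0123456789", size=10)  # 10 > 2 * 4: pauses the protocol
    assert proto._reading_paused
    await queue.read()                       # drains below the limit: resumes
    assert not proto._reading_paused


asyncio.run(main())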
Some files were not shown because too many files have changed in this diff.