@@ -0,0 +1 @@
b60c37d122fa91049ccf318c94c871d82ba17ff3bc3fc64f8a65426fce7120b7 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
@@ -0,0 +1 @@
d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
@@ -0,0 +1 @@
043f0b704444c6c59da38ab3bae43ce1ff8bfe91d5ce45103b494400e7b71688 /home/runner/work/aiohttp/aiohttp/aiohttp/_frozenlist.pyx
@@ -0,0 +1 @@
6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi
@@ -0,0 +1 @@
5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx
@@ -0,0 +1 @@
f0688fb2e81ea92bf0a17822260d9591a30979101da12a4b873113fc459fb5fa /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
@@ -0,0 +1 @@
4e7b7f7baa5c65954e85a5b7c8db7786a0ec3498081b0a9420f792a803086281 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
@@ -0,0 +1 @@
d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx
@@ -0,0 +1 @@
6d134aa08da3d6ba0f76d81fc7f9ec7836a7bc1a99b1950d1c3aa65ed7e3951a /home/runner/work/aiohttp/aiohttp/aiohttp/frozenlist.pyi
@@ -0,0 +1 @@
5ac8c3258003604c8993bfa8357361036337330b722e4849024972ccbb5c95f5 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
@@ -0,0 +1 @@
48b4df50f771d7e8385524ea0a7057ca1482974f8a43e674982b04b08bc17d5e /home/runner/work/aiohttp/aiohttp/aiohttp/signals.pyi
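
The lines above follow the `sha256sum` output convention (digest, then path), so each digest can be re-checked against a checkout. A minimal sketch in Python, assuming a local aiohttp checkout (the /home/runner paths are the CI runner's and are left as-is):

    # Hedged sketch: recompute and compare one of the digests listed above.
    import hashlib

    def sha256_of(path: str) -> str:
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                h.update(chunk)
        return h.hexdigest()

    expected = "b60c37d122fa91049ccf318c94c871d82ba17ff3bc3fc64f8a65426fce7120b7"
    assert sha256_of("aiohttp/_cparser.pxd") == expected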

217 .venv/lib/python3.9/site-packages/aiohttp/__init__.py Normal file
@@ -0,0 +1,217 @@
__version__ = "3.7.4.post0"

from typing import Tuple

from . import hdrs as hdrs
from .client import (
    BaseConnector as BaseConnector,
    ClientConnectionError as ClientConnectionError,
    ClientConnectorCertificateError as ClientConnectorCertificateError,
    ClientConnectorError as ClientConnectorError,
    ClientConnectorSSLError as ClientConnectorSSLError,
    ClientError as ClientError,
    ClientHttpProxyError as ClientHttpProxyError,
    ClientOSError as ClientOSError,
    ClientPayloadError as ClientPayloadError,
    ClientProxyConnectionError as ClientProxyConnectionError,
    ClientRequest as ClientRequest,
    ClientResponse as ClientResponse,
    ClientResponseError as ClientResponseError,
    ClientSession as ClientSession,
    ClientSSLError as ClientSSLError,
    ClientTimeout as ClientTimeout,
    ClientWebSocketResponse as ClientWebSocketResponse,
    ContentTypeError as ContentTypeError,
    Fingerprint as Fingerprint,
    InvalidURL as InvalidURL,
    NamedPipeConnector as NamedPipeConnector,
    RequestInfo as RequestInfo,
    ServerConnectionError as ServerConnectionError,
    ServerDisconnectedError as ServerDisconnectedError,
    ServerFingerprintMismatch as ServerFingerprintMismatch,
    ServerTimeoutError as ServerTimeoutError,
    TCPConnector as TCPConnector,
    TooManyRedirects as TooManyRedirects,
    UnixConnector as UnixConnector,
    WSServerHandshakeError as WSServerHandshakeError,
    request as request,
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy
from .http import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    WebSocketError as WebSocketError,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
)
from .multipart import (
    BadContentDispositionHeader as BadContentDispositionHeader,
    BadContentDispositionParam as BadContentDispositionParam,
    BodyPartReader as BodyPartReader,
    MultipartReader as MultipartReader,
    MultipartWriter as MultipartWriter,
    content_disposition_filename as content_disposition_filename,
    parse_content_disposition as parse_content_disposition,
)
from .payload import (
    PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
    AsyncIterablePayload as AsyncIterablePayload,
    BufferedReaderPayload as BufferedReaderPayload,
    BytesIOPayload as BytesIOPayload,
    BytesPayload as BytesPayload,
    IOBasePayload as IOBasePayload,
    JsonPayload as JsonPayload,
    Payload as Payload,
    StringIOPayload as StringIOPayload,
    StringPayload as StringPayload,
    TextIOPayload as TextIOPayload,
    get_payload as get_payload,
    payload_type as payload_type,
)
from .payload_streamer import streamer as streamer
from .resolver import (
    AsyncResolver as AsyncResolver,
    DefaultResolver as DefaultResolver,
    ThreadedResolver as ThreadedResolver,
)
from .signals import Signal as Signal
from .streams import (
    EMPTY_PAYLOAD as EMPTY_PAYLOAD,
    DataQueue as DataQueue,
    EofStream as EofStream,
    FlowControlDataQueue as FlowControlDataQueue,
    StreamReader as StreamReader,
)
from .tracing import (
    TraceConfig as TraceConfig,
    TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
    TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
    TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
    TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
    TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
    TraceDnsCacheHitParams as TraceDnsCacheHitParams,
    TraceDnsCacheMissParams as TraceDnsCacheMissParams,
    TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
    TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
    TraceRequestChunkSentParams as TraceRequestChunkSentParams,
    TraceRequestEndParams as TraceRequestEndParams,
    TraceRequestExceptionParams as TraceRequestExceptionParams,
    TraceRequestRedirectParams as TraceRequestRedirectParams,
    TraceRequestStartParams as TraceRequestStartParams,
    TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)

__all__: Tuple[str, ...] = (
    "hdrs",
    # client
    "BaseConnector",
    "ClientConnectionError",
    "ClientConnectorCertificateError",
    "ClientConnectorError",
    "ClientConnectorSSLError",
    "ClientError",
    "ClientHttpProxyError",
    "ClientOSError",
    "ClientPayloadError",
    "ClientProxyConnectionError",
    "ClientResponse",
    "ClientRequest",
    "ClientResponseError",
    "ClientSSLError",
    "ClientSession",
    "ClientTimeout",
    "ClientWebSocketResponse",
    "ContentTypeError",
    "Fingerprint",
    "InvalidURL",
    "RequestInfo",
    "ServerConnectionError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ServerTimeoutError",
    "TCPConnector",
    "TooManyRedirects",
    "UnixConnector",
    "NamedPipeConnector",
    "WSServerHandshakeError",
    "request",
    # cookiejar
    "CookieJar",
    "DummyCookieJar",
    # formdata
    "FormData",
    # helpers
    "BasicAuth",
    "ChainMapProxy",
    # http
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    "WSMsgType",
    "WSCloseCode",
    "WSMessage",
    "WebSocketError",
    # multipart
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "BodyPartReader",
    "MultipartReader",
    "MultipartWriter",
    "content_disposition_filename",
    "parse_content_disposition",
    # payload
    "AsyncIterablePayload",
    "BufferedReaderPayload",
    "BytesIOPayload",
    "BytesPayload",
    "IOBasePayload",
    "JsonPayload",
    "PAYLOAD_REGISTRY",
    "Payload",
    "StringIOPayload",
    "StringPayload",
    "TextIOPayload",
    "get_payload",
    "payload_type",
    # payload_streamer
    "streamer",
    # resolver
    "AsyncResolver",
    "DefaultResolver",
    "ThreadedResolver",
    # signals
    "Signal",
    "DataQueue",
    "EMPTY_PAYLOAD",
    "EofStream",
    "FlowControlDataQueue",
    "StreamReader",
    # tracing
    "TraceConfig",
    "TraceConnectionCreateEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsResolveHostStartParams",
    "TraceRequestChunkSentParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceRequestRedirectParams",
    "TraceRequestStartParams",
    "TraceResponseChunkReceivedParams",
)

try:
    from .worker import GunicornUVLoopWebWorker, GunicornWebWorker

    __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError:  # pragma: no cover
    pass
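
Since every public name above is re-exported from the top-level `aiohttp` package, the whole client API is reachable without importing submodules. A minimal usage sketch (the URL is illustrative):

    # Hedged sketch: fetch a page with the re-exported client API.
    import asyncio
    import aiohttp

    async def main() -> None:
        timeout = aiohttp.ClientTimeout(total=10)  # re-exported above
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get("https://example.com") as resp:
                print(resp.status)
                print(await resp.text())

    asyncio.run(main())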

(47 binary files not shown)

140 .venv/lib/python3.9/site-packages/aiohttp/_cparser.pxd Normal file
@@ -0,0 +1,140 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t


cdef extern from "../vendor/http-parser/http_parser.h":
    ctypedef int (*http_data_cb) (http_parser*,
                                  const char *at,
                                  size_t length) except -1

    ctypedef int (*http_cb) (http_parser*) except -1

    struct http_parser:
        unsigned int type
        unsigned int flags
        unsigned int state
        unsigned int header_state
        unsigned int index

        uint32_t nread
        uint64_t content_length

        unsigned short http_major
        unsigned short http_minor
        unsigned int status_code
        unsigned int method
        unsigned int http_errno

        unsigned int upgrade

        void *data

    struct http_parser_settings:
        http_cb on_message_begin
        http_data_cb on_url
        http_data_cb on_status
        http_data_cb on_header_field
        http_data_cb on_header_value
        http_cb on_headers_complete
        http_data_cb on_body
        http_cb on_message_complete
        http_cb on_chunk_header
        http_cb on_chunk_complete

    enum http_parser_type:
        HTTP_REQUEST,
        HTTP_RESPONSE,
        HTTP_BOTH

    enum http_errno:
        HPE_OK,
        HPE_CB_message_begin,
        HPE_CB_url,
        HPE_CB_header_field,
        HPE_CB_header_value,
        HPE_CB_headers_complete,
        HPE_CB_body,
        HPE_CB_message_complete,
        HPE_CB_status,
        HPE_CB_chunk_header,
        HPE_CB_chunk_complete,
        HPE_INVALID_EOF_STATE,
        HPE_HEADER_OVERFLOW,
        HPE_CLOSED_CONNECTION,
        HPE_INVALID_VERSION,
        HPE_INVALID_STATUS,
        HPE_INVALID_METHOD,
        HPE_INVALID_URL,
        HPE_INVALID_HOST,
        HPE_INVALID_PORT,
        HPE_INVALID_PATH,
        HPE_INVALID_QUERY_STRING,
        HPE_INVALID_FRAGMENT,
        HPE_LF_EXPECTED,
        HPE_INVALID_HEADER_TOKEN,
        HPE_INVALID_CONTENT_LENGTH,
        HPE_INVALID_CHUNK_SIZE,
        HPE_INVALID_CONSTANT,
        HPE_INVALID_INTERNAL_STATE,
        HPE_STRICT,
        HPE_PAUSED,
        HPE_UNKNOWN

    enum flags:
        F_CHUNKED,
        F_CONNECTION_KEEP_ALIVE,
        F_CONNECTION_CLOSE,
        F_CONNECTION_UPGRADE,
        F_TRAILING,
        F_UPGRADE,
        F_SKIPBODY,
        F_CONTENTLENGTH

    enum http_method:
        DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
        LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
        REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
        MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
        LINK, UNLINK

    void http_parser_init(http_parser *parser, http_parser_type type)

    size_t http_parser_execute(http_parser *parser,
                               const http_parser_settings *settings,
                               const char *data,
                               size_t len)

    int http_should_keep_alive(const http_parser *parser)

    void http_parser_settings_init(http_parser_settings *settings)

    const char *http_errno_name(http_errno err)
    const char *http_errno_description(http_errno err)
    const char *http_method_str(http_method m)

    # URL Parser

    enum http_parser_url_fields:
        UF_SCHEMA = 0,
        UF_HOST = 1,
        UF_PORT = 2,
        UF_PATH = 3,
        UF_QUERY = 4,
        UF_FRAGMENT = 5,
        UF_USERINFO = 6,
        UF_MAX = 7

    struct http_parser_url_field_data:
        uint16_t off
        uint16_t len

    struct http_parser_url:
        uint16_t field_set
        uint16_t port
        http_parser_url_field_data[<int>UF_MAX] field_data

    void http_parser_url_init(http_parser_url *u)

    int http_parser_parse_url(const char *buf,
                              size_t buflen,
                              int is_connect,
                              http_parser_url *u)
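
The URL-parser declarations end with `http_parser_url`, whose `field_set` is a bitmask: bit `UF_X` is set when field `UF_X` was found, and `field_data[UF_X]` then holds its offset and length within the input buffer. A worked Python illustration of that convention (enum values copied from the declarations above):

    # Bit i of field_set marks URL field i as present.
    UF_SCHEMA, UF_HOST, UF_PORT, UF_PATH = 0, 1, 2, 3

    field_set = (1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH)
    assert field_set & (1 << UF_HOST)      # host was parsed
    assert not field_set & (1 << UF_PORT)  # no explicit port in the URL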

9870 .venv/lib/python3.9/site-packages/aiohttp/_find_header.c Normal file (diff suppressed: file too large)

14 .venv/lib/python3.9/site-packages/aiohttp/_find_header.h Normal file
@@ -0,0 +1,14 @@
#ifndef _FIND_HEADERS_H
#define _FIND_HEADERS_H

#ifdef __cplusplus
extern "C" {
#endif

int find_header(const char *str, int size);


#ifdef __cplusplus
}
#endif
#endif

2 .venv/lib/python3.9/site-packages/aiohttp/_find_header.pxd Normal file
@@ -0,0 +1,2 @@
cdef extern from "_find_header.h":
    int find_header(char *, int)
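
`find_header()` maps a raw header name to the index of its canonical entry in the generated table (or -1 when unknown); `_http_parser.pyx` below uses that index into the `headers` tuple from `_headers.pxi`. A rough pure-Python sketch of the lookup-with-fallback contract, using a hypothetical two-entry table rather than the real generated one:

    # Hypothetical sketch of the C lookup's contract, not the real table.
    CANONICAL = {b"content-type": "Content-Type", b"host": "Host"}

    def find_header_py(raw: bytes):
        name = CANONICAL.get(raw.lower())
        if name is None:  # mirrors the idx == -1 fallback path
            return raw.decode("utf-8", "surrogateescape")
        return name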

7512 .venv/lib/python3.9/site-packages/aiohttp/_frozenlist.c Normal file (diff suppressed: file too large)
(binary file not shown)

108 .venv/lib/python3.9/site-packages/aiohttp/_frozenlist.pyx Normal file
@@ -0,0 +1,108 @@
from collections.abc import MutableSequence


cdef class FrozenList:

    cdef readonly bint frozen
    cdef list _items

    def __init__(self, items=None):
        self.frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    cdef object _check_frozen(self):
        if self.frozen:
            raise RuntimeError("Cannot modify frozen list.")

    cdef inline object _fast_len(self):
        return len(self._items)

    def freeze(self):
        self.frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._check_frozen()
        self._items[index] = value

    def __delitem__(self, index):
        self._check_frozen()
        del self._items[index]

    def __len__(self):
        return self._fast_len()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __richcmp__(self, other, op):
        if op == 0:  # <
            return list(self) < other
        if op == 1:  # <=
            return list(self) <= other
        if op == 2:  # ==
            return list(self) == other
        if op == 3:  # !=
            return list(self) != other
        if op == 4:  # >
            return list(self) > other
        if op == 5:  # >=
            return list(self) >= other

    def insert(self, pos, item):
        self._check_frozen()
        self._items.insert(pos, item)

    def __contains__(self, item):
        return item in self._items

    def __iadd__(self, items):
        self._check_frozen()
        self._items += list(items)
        return self

    def index(self, item):
        return self._items.index(item)

    def remove(self, item):
        self._check_frozen()
        self._items.remove(item)

    def clear(self):
        self._check_frozen()
        self._items.clear()

    def extend(self, items):
        self._check_frozen()
        self._items += list(items)

    def reverse(self):
        self._check_frozen()
        self._items.reverse()

    def pop(self, index=-1):
        self._check_frozen()
        return self._items.pop(index)

    def append(self, item):
        self._check_frozen()
        return self._items.append(item)

    def count(self, item):
        return self._items.count(item)

    def __repr__(self):
        return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
                                                      self._items)


MutableSequence.register(FrozenList)
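
Usage sketch of `FrozenList` (assuming the built extension imports as `aiohttp._frozenlist`): it behaves like a normal mutable sequence until `freeze()` is called, after which every mutator raises:

    from aiohttp._frozenlist import FrozenList

    fl = FrozenList([1, 2])
    fl.append(3)
    fl.freeze()
    try:
        fl.append(4)
    except RuntimeError as exc:
        print(exc)  # Cannot modify frozen list.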

83 .venv/lib/python3.9/site-packages/aiohttp/_headers.pxi Normal file
@@ -0,0 +1,83 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after changing the origin.

from . import hdrs
cdef tuple headers = (
    hdrs.ACCEPT,
    hdrs.ACCEPT_CHARSET,
    hdrs.ACCEPT_ENCODING,
    hdrs.ACCEPT_LANGUAGE,
    hdrs.ACCEPT_RANGES,
    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
    hdrs.ACCESS_CONTROL_MAX_AGE,
    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
    hdrs.AGE,
    hdrs.ALLOW,
    hdrs.AUTHORIZATION,
    hdrs.CACHE_CONTROL,
    hdrs.CONNECTION,
    hdrs.CONTENT_DISPOSITION,
    hdrs.CONTENT_ENCODING,
    hdrs.CONTENT_LANGUAGE,
    hdrs.CONTENT_LENGTH,
    hdrs.CONTENT_LOCATION,
    hdrs.CONTENT_MD5,
    hdrs.CONTENT_RANGE,
    hdrs.CONTENT_TRANSFER_ENCODING,
    hdrs.CONTENT_TYPE,
    hdrs.COOKIE,
    hdrs.DATE,
    hdrs.DESTINATION,
    hdrs.DIGEST,
    hdrs.ETAG,
    hdrs.EXPECT,
    hdrs.EXPIRES,
    hdrs.FORWARDED,
    hdrs.FROM,
    hdrs.HOST,
    hdrs.IF_MATCH,
    hdrs.IF_MODIFIED_SINCE,
    hdrs.IF_NONE_MATCH,
    hdrs.IF_RANGE,
    hdrs.IF_UNMODIFIED_SINCE,
    hdrs.KEEP_ALIVE,
    hdrs.LAST_EVENT_ID,
    hdrs.LAST_MODIFIED,
    hdrs.LINK,
    hdrs.LOCATION,
    hdrs.MAX_FORWARDS,
    hdrs.ORIGIN,
    hdrs.PRAGMA,
    hdrs.PROXY_AUTHENTICATE,
    hdrs.PROXY_AUTHORIZATION,
    hdrs.RANGE,
    hdrs.REFERER,
    hdrs.RETRY_AFTER,
    hdrs.SEC_WEBSOCKET_ACCEPT,
    hdrs.SEC_WEBSOCKET_EXTENSIONS,
    hdrs.SEC_WEBSOCKET_KEY,
    hdrs.SEC_WEBSOCKET_KEY1,
    hdrs.SEC_WEBSOCKET_PROTOCOL,
    hdrs.SEC_WEBSOCKET_VERSION,
    hdrs.SERVER,
    hdrs.SET_COOKIE,
    hdrs.TE,
    hdrs.TRAILER,
    hdrs.TRANSFER_ENCODING,
    hdrs.URI,
    hdrs.UPGRADE,
    hdrs.USER_AGENT,
    hdrs.VARY,
    hdrs.VIA,
    hdrs.WWW_AUTHENTICATE,
    hdrs.WANT_DIGEST,
    hdrs.WARNING,
    hdrs.X_FORWARDED_FOR,
    hdrs.X_FORWARDED_HOST,
    hdrs.X_FORWARDED_PROTO,
)

5433 .venv/lib/python3.9/site-packages/aiohttp/_helpers.c Normal file (diff suppressed: file too large)
(binary file not shown)

6 .venv/lib/python3.9/site-packages/aiohttp/_helpers.pyi Normal file
@@ -0,0 +1,6 @@
from typing import Any

class reify:
    def __init__(self, wrapped: Any) -> None: ...
    def __get__(self, inst: Any, owner: Any) -> Any: ...
    def __set__(self, inst: Any, value: Any) -> None: ...

35 .venv/lib/python3.9/site-packages/aiohttp/_helpers.pyx Normal file
@@ -0,0 +1,35 @@
cdef class reify:
    """Use as a class method decorator. It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.

    """

    cdef object wrapped
    cdef object name

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    @property
    def __doc__(self):
        return self.wrapped.__doc__

    def __get__(self, inst, owner):
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")
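
Usage sketch of `reify` (importable as `aiohttp.helpers.reify`, which prefers this Cython version when the extension is built): the decorated method runs once per instance, and the result is then served from the instance's `_cache` dict, which the owning class must provide:

    from aiohttp.helpers import reify

    class Request:
        def __init__(self) -> None:
            self._cache = {}  # reify stores computed values here

        @reify
        def headers(self):
            print("computed once")
            return {"Host": "example.com"}

    req = Request()
    req.headers  # prints "computed once" and caches the dict
    req.headers  # second access is served from _cache, no print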

24607 .venv/lib/python3.9/site-packages/aiohttp/_http_parser.c Normal file (diff suppressed: file too large)
(binary file not shown)

875 .venv/lib/python3.9/site-packages/aiohttp/_http_parser.pyx Normal file
@@ -0,0 +1,875 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function

from cpython cimport (
    Py_buffer,
    PyBUF_SIMPLE,
    PyBuffer_Release,
    PyBytes_AsString,
    PyBytes_AsStringAndSize,
    PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy

from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL

from aiohttp import hdrs

from .http_exceptions import (
    BadHttpMessage,
    BadStatusLine,
    ContentLengthError,
    InvalidHeader,
    InvalidURLError,
    LineTooLong,
    PayloadEncodingError,
    TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .http_writer import (
    HttpVersion as _HttpVersion,
    HttpVersion10 as _HttpVersion10,
    HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader

cimport cython

from aiohttp cimport _cparser as cparser

include "_headers.pxi"

from aiohttp cimport _find_header

DEF DEFAULT_FREELIST_SIZE = 250

cdef extern from "Python.h":
    int PyByteArray_Resize(object, Py_ssize_t) except -1
    Py_ssize_t PyByteArray_Size(object) except -1
    char* PyByteArray_AsString(object)

__all__ = ('HttpRequestParser', 'HttpResponseParser',
           'RawRequestMessage', 'RawResponseMessage')

cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer


cdef inline object extend(object buf, const char* at, size_t length):
    cdef Py_ssize_t s
    cdef char* ptr
    s = PyByteArray_Size(buf)
    PyByteArray_Resize(buf, s + length)
    ptr = PyByteArray_AsString(buf)
    memcpy(ptr + s, at, length)


DEF METHODS_COUNT = 34;

cdef list _http_method = []

for i in range(METHODS_COUNT):
    _http_method.append(
        cparser.http_method_str(<cparser.http_method> i).decode('ascii'))


cdef inline str http_method_str(int i):
    if i < METHODS_COUNT:
        return <str>_http_method[i]
    else:
        return "<unknown>"

cdef inline object find_header(bytes raw_header):
    cdef Py_ssize_t size
    cdef char *buf
    cdef int idx
    PyBytes_AsStringAndSize(raw_header, &buf, &size)
    idx = _find_header.find_header(buf, size)
    if idx == -1:
        return raw_header.decode('utf-8', 'surrogateescape')
    return headers[idx]


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
    cdef readonly str method
    cdef readonly str path
    cdef readonly object version  # HttpVersion
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked
    cdef readonly object url  # yarl.URL

    def __init__(self, method, path, version, headers, raw_headers,
                 should_close, compression, upgrade, chunked, url):
        self.method = method
        self.path = path
        self.version = version
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked
        self.url = url

    def __repr__(self):
        info = []
        info.append(("method", self.method))
        info.append(("path", self.path))
        info.append(("version", self.version))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        info.append(("url", self.url))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawRequestMessage(' + sinfo + ')>'

    def _replace(self, **dct):
        cdef RawRequestMessage ret
        ret = _new_request_message(self.method,
                                   self.path,
                                   self.version,
                                   self.headers,
                                   self.raw_headers,
                                   self.should_close,
                                   self.compression,
                                   self.upgrade,
                                   self.chunked,
                                   self.url)
        if "method" in dct:
            ret.method = dct["method"]
        if "path" in dct:
            ret.path = dct["path"]
        if "version" in dct:
            ret.version = dct["version"]
        if "headers" in dct:
            ret.headers = dct["headers"]
        if "raw_headers" in dct:
            ret.raw_headers = dct["raw_headers"]
        if "should_close" in dct:
            ret.should_close = dct["should_close"]
        if "compression" in dct:
            ret.compression = dct["compression"]
        if "upgrade" in dct:
            ret.upgrade = dct["upgrade"]
        if "chunked" in dct:
            ret.chunked = dct["chunked"]
        if "url" in dct:
            ret.url = dct["url"]
        return ret

cdef _new_request_message(str method,
                          str path,
                          object version,
                          object headers,
                          object raw_headers,
                          bint should_close,
                          object compression,
                          bint upgrade,
                          bint chunked,
                          object url):
    cdef RawRequestMessage ret
    ret = RawRequestMessage.__new__(RawRequestMessage)
    ret.method = method
    ret.path = path
    ret.version = version
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    ret.url = url
    return ret


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    cdef readonly object version  # HttpVersion
    cdef readonly int code
    cdef readonly str reason
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        info = []
        info.append(("version", self.version))
        info.append(("code", self.code))
        info.append(("reason", self.reason))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawResponseMessage(' + sinfo + ')>'


cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret


@cython.internal
cdef class HttpParser:

    cdef:
        cparser.http_parser* _cparser
        cparser.http_parser_settings* _csettings

        bytearray _raw_name
        bytearray _raw_value
        bint _has_value

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started
        object _url
        bytearray _buf
        str _path
        str _reason
        object _headers
        list _raw_headers
        bint _upgraded
        list _messages
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error
        bint _auto_decompress
        int _limit

        str _content_encoding

        Py_buffer py_buf

    def __cinit__(self):
        self._cparser = <cparser.http_parser*> \
            PyMem_Malloc(sizeof(cparser.http_parser))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.http_parser_settings*> \
            PyMem_Malloc(sizeof(cparser.http_parser_settings))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(self, cparser.http_parser_type mode,
               object protocol, object loop, int limit,
               object timer=None,
               size_t max_line_size=8190, size_t max_headers=32768,
               size_t max_field_size=8190, payload_exception=None,
               bint response_with_body=True, bint read_until_eof=False,
               bint auto_decompress=True):
        cparser.http_parser_init(self._cparser, mode)
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        cparser.http_parser_settings_init(self._csettings)

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            self._process_header()

        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.http_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == 5:  # cparser.CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or
                self._cparser.method == 5 or  # CONNECT: 5
                (self._cparser.status_code >= 199 and
                 self._cparser.content_length == ULLONG_MAX and
                 self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        pass

    cdef inline http_version(self):
        cdef cparser.http_parser* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENTLENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif self._cparser.http_errno != cparser.HPE_OK:
                desc = cparser.http_errno_description(
                    <cparser.http_errno> self._cparser.http_errno)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        cdef:
            size_t data_len
            size_t nb

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        nb = cparser.http_parser_execute(
            self._cparser,
            self._csettings,
            <char*>self.py_buf.buf,
            data_len)

        PyBuffer_Release(&self.py_buf)

        if (self._cparser.http_errno != cparser.HPE_OK):
            if self._payload_error == 0:
                if self._last_error is not None:
                    ex = self._last_error
                    self._last_error = None
                else:
                    ex = parser_error_from_errno(
                        <cparser.http_errno> self._cparser.http_errno)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val


cdef class HttpRequestParser(HttpParser):

    def __init__(self, protocol, loop, int limit, timer=None,
                 size_t max_line_size=8190, size_t max_headers=32768,
                 size_t max_field_size=8190, payload_exception=None,
                 bint response_with_body=True, bint read_until_eof=False,
                 ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof)

    cdef object _on_status_complete(self):
        cdef Py_buffer py_buf
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        if self._cparser.method == 5:  # CONNECT
            self._url = URL(self._path)
        else:
            PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
            try:
                self._url = _parse_url(<char*>py_buf.buf,
                                       py_buf.len)
            finally:
                PyBuffer_Release(&py_buf)
        PyByteArray_Resize(self._buf, 0)


cdef class HttpResponseParser(HttpParser):

    def __init__(self, protocol, loop, int limit, timer=None,
                 size_t max_line_size=8190, size_t max_headers=32768,
                 size_t max_field_size=8190, payload_exception=None,
                 bint response_with_body=True, bint read_until_eof=False,
                 bint auto_decompress=True
                 ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''

cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data

    pyparser._started = True
    pyparser._headers = CIMultiDict()
    pyparser._raw_headers = []
    PyByteArray_Resize(pyparser._buf, 0)
    pyparser._path = None
    pyparser._reason = None
    return 0


cdef int cb_on_url(cparser.http_parser* parser,
                   const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_status(cparser.http_parser* parser,
                      const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef str reason
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_field(cparser.http_parser* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        pyparser._on_status_complete()
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_value(cparser.http_parser* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        if pyparser._cparser.upgrade or pyparser._cparser.method == 5:  # CONNECT
            return 2
        else:
            return 0


cdef int cb_on_body(cparser.http_parser* parser,
                    const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as exc:
        if pyparser._payload_exception is not None:
            pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
        else:
            pyparser._payload.set_exception(exc)
        pyparser._payload_error = 1
        return -1
    else:
        return 0


cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._started = False
        pyparser._on_message_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_header()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef parser_error_from_errno(cparser.http_errno errno):
    cdef bytes desc = cparser.http_errno_description(errno)

    if errno in (cparser.HPE_CB_message_begin,
                 cparser.HPE_CB_url,
                 cparser.HPE_CB_header_field,
                 cparser.HPE_CB_header_value,
                 cparser.HPE_CB_headers_complete,
                 cparser.HPE_CB_body,
                 cparser.HPE_CB_message_complete,
                 cparser.HPE_CB_status,
                 cparser.HPE_CB_chunk_header,
                 cparser.HPE_CB_chunk_complete):
        cls = BadHttpMessage

    elif errno == cparser.HPE_INVALID_STATUS:
        cls = BadStatusLine

    elif errno == cparser.HPE_INVALID_METHOD:
        cls = BadStatusLine

    elif errno == cparser.HPE_INVALID_URL:
        cls = InvalidURLError

    else:
        cls = BadHttpMessage

    return cls(desc.decode('latin-1'))


def parse_url(url):
    cdef:
        Py_buffer py_buf
        char* buf_data

    PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
    try:
        buf_data = <char*>py_buf.buf
        return _parse_url(buf_data, py_buf.len)
    finally:
        PyBuffer_Release(&py_buf)


cdef _parse_url(char* buf_data, size_t length):
    cdef:
        cparser.http_parser_url* parsed
        int res
        str schema = None
        str host = None
        object port = None
        str path = None
        str query = None
        str fragment = None
        str user = None
        str password = None
        str userinfo = None
        object result = None
        int off
        int ln

    parsed = <cparser.http_parser_url*> \
        PyMem_Malloc(sizeof(cparser.http_parser_url))
    if parsed is NULL:
        raise MemoryError()
    cparser.http_parser_url_init(parsed)
    try:
        res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)

        if res == 0:
            if parsed.field_set & (1 << cparser.UF_SCHEMA):
                off = parsed.field_data[<int>cparser.UF_SCHEMA].off
                ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
                schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
            else:
                schema = ''

            if parsed.field_set & (1 << cparser.UF_HOST):
                off = parsed.field_data[<int>cparser.UF_HOST].off
                ln = parsed.field_data[<int>cparser.UF_HOST].len
                host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
            else:
                host = ''

            if parsed.field_set & (1 << cparser.UF_PORT):
                port = parsed.port

            if parsed.field_set & (1 << cparser.UF_PATH):
                off = parsed.field_data[<int>cparser.UF_PATH].off
                ln = parsed.field_data[<int>cparser.UF_PATH].len
                path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
            else:
                path = ''

            if parsed.field_set & (1 << cparser.UF_QUERY):
                off = parsed.field_data[<int>cparser.UF_QUERY].off
                ln = parsed.field_data[<int>cparser.UF_QUERY].len
                query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
            else:
                query = ''

            if parsed.field_set & (1 << cparser.UF_FRAGMENT):
                off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
                ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
                fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
            else:
                fragment = ''

            if parsed.field_set & (1 << cparser.UF_USERINFO):
                off = parsed.field_data[<int>cparser.UF_USERINFO].off
                ln = parsed.field_data[<int>cparser.UF_USERINFO].len
                userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')

                user, sep, password = userinfo.partition(':')

            return URL_build(scheme=schema,
                             user=user, password=password, host=host, port=port,
                             path=path, query_string=query, fragment=fragment, encoded=True)
        else:
            raise InvalidURLError("invalid url {!r}".format(buf_data))
    finally:
        PyMem_Free(parsed)
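
Usage sketch of the module-level `parse_url()` defined above (assuming the compiled extension is built and importable as `aiohttp._http_parser`): it accepts any buffer-protocol object and returns a `yarl.URL` built from the parsed fields:

    from aiohttp._http_parser import parse_url

    url = parse_url(b"http://user:pass@example.com:8080/path?q=1#frag")
    print(url.host, url.port, url.path)  # example.com 8080 /path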

5840 .venv/lib/python3.9/site-packages/aiohttp/_http_writer.c Normal file (diff suppressed: file too large)
(binary file not shown)

151 .venv/lib/python3.9/site-packages/aiohttp/_http_writer.pyx Normal file
@@ -0,0 +1,151 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy

from multidict import istr

DEF BUF_SIZE = 16 * 1024  # 16KiB
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr


# ----------------- writer ---------------------------

cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)


cdef inline int _write_byte(Writer* writer, uint8_t ch):
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0


cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate pair, ignored
        return 0
    elif utf < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))


cdef inline int _write_str(Writer* writer, str s):
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1


# --------------- _serialize_headers ----------------------

cdef str to_str(object s):
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)


def _serialize_headers(str status_line, headers):
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
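
Usage sketch of `_serialize_headers` (assuming the compiled extension is importable as `aiohttp._http_writer`): it renders the status line and headers into one CRLF-delimited bytes block, ending with the blank line that terminates an HTTP header section:

    from multidict import CIMultiDict
    from aiohttp._http_writer import _serialize_headers

    buf = _serialize_headers(
        "GET / HTTP/1.1",
        CIMultiDict({"Host": "example.com", "Connection": "keep-alive"}),
    )
    assert buf.startswith(b"GET / HTTP/1.1\r\nHost: example.com\r\n")
    assert buf.endswith(b"\r\n\r\n")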

3588 .venv/lib/python3.9/site-packages/aiohttp/_websocket.c Normal file (diff suppressed: file too large)
(binary file not shown)
56
.venv/lib/python3.9/site-packages/aiohttp/_websocket.pyx
Normal file
56
.venv/lib/python3.9/site-packages/aiohttp/_websocket.pyx
Normal file
@@ -0,0 +1,56 @@
from cpython cimport PyBytes_AsString


# from cpython cimport PyByteArray_AsString  # Cython does not export that yet
cdef extern from "Python.h":
    char* PyByteArray_AsString(bytearray ba) except NULL

from libc.stdint cimport uint32_t, uint64_t, uintmax_t


def _websocket_mask_cython(object mask, object data):
    """Note: this function mutates its `data` argument
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # is that needed in Python?  malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8

    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
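The hot loops above XOR 8 or 4 bytes at a time; a byte-at-a-time pure-Python equivalent (hypothetical name websocket_mask_py) shows the underlying algorithm:

def websocket_mask_py(mask: bytes, data: bytearray) -> None:
    # XOR each payload byte with the repeating 4-byte mask, in place;
    # exactly what the word-sized fast paths above compute.
    assert len(mask) == 4
    for i in range(len(data)):
        data[i] ^= mask[i % 4]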
200
.venv/lib/python3.9/site-packages/aiohttp/abc.py
Normal file
@@ -0,0 +1,200 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    Tuple,
)

from multidict import CIMultiDict
from yarl import URL

from .helpers import get_running_loop
from .typedefs import LooseCookies

if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application
    from .web_exceptions import HTTPException
    from .web_request import BaseRequest, Request
    from .web_response import StreamResponse
else:
    BaseRequest = Request = Application = StreamResponse = None
    HTTPException = None


class AbstractRouter(ABC):
    def __init__(self) -> None:
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""


class AbstractMatchInfo(ABC):
    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """


class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""


class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""


if TYPE_CHECKING:  # pragma: no cover
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self) -> None:
        """Clear all cookies."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""


class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    buffer_size = 0
    output_size = 0
    length = 0  # type: Optional[int]

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""


class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
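A minimal concrete implementation of AbstractAccessLogger above, sketched under the assumption that it is installed via web.run_app(app, access_log_class=...); the class name is illustrative:

from aiohttp import web
from aiohttp.abc import AbstractAccessLogger

class PlainAccessLogger(AbstractAccessLogger):
    # One log line per request: method, path, status and handling time.
    def log(self, request, response, time):
        self.logger.info(
            "%s %s -> %d in %.3fs",
            request.method, request.path, response.status, time,
        )

# web.run_app(app, access_log_class=PlainAccessLogger)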
87
.venv/lib/python3.9/site-packages/aiohttp/base_protocol.py
Normal file
@@ -0,0 +1,87 @@
import asyncio
from typing import Optional, cast

from .tcp_helpers import tcp_nodelay


class BaseProtocol(asyncio.Protocol):
    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop  # type: asyncio.AbstractEventLoop
        self._paused = False
        self._drain_waiter = None  # type: Optional[asyncio.Future[None]]
        self._connection_lost = False
        self._reading_paused = False

        self.transport = None  # type: Optional[asyncio.Transport]

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._connection_lost = True
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self) -> None:
        if self._connection_lost:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        assert waiter is None or waiter.cancelled()
        waiter = self._loop.create_future()
        self._drain_waiter = waiter
        await waiter
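A sketch of how the flow-control hooks above are typically used by a writer; send_large is a hypothetical helper, not part of aiohttp:

async def send_large(proto: BaseProtocol, payload: bytes) -> None:
    # Writing may trigger pause_writing(); _drain_helper() then parks on
    # _drain_waiter until the transport calls resume_writing().
    assert proto.transport is not None
    proto.transport.write(payload)
    await proto._drain_helper()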
1275
.venv/lib/python3.9/site-packages/aiohttp/client.py
Normal file
File diff suppressed because it is too large
317
.venv/lib/python3.9/site-packages/aiohttp/client_exceptions.py
Normal file
@@ -0,0 +1,317 @@
"""HTTP related errors."""

import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union

from .typedefs import LooseHeaders

try:
    import ssl

    SSLContext = ssl.SSLContext
except ImportError:  # pragma: no cover
    ssl = SSLContext = None  # type: ignore


if TYPE_CHECKING:  # pragma: no cover
    from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
    RequestInfo = ClientResponse = ConnectionKey = None

__all__ = (
    "ClientError",
    "ClientConnectionError",
    "ClientOSError",
    "ClientConnectorError",
    "ClientProxyConnectionError",
    "ClientSSLError",
    "ClientConnectorSSLError",
    "ClientConnectorCertificateError",
    "ServerConnectionError",
    "ServerTimeoutError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ClientResponseError",
    "ClientHttpProxyError",
    "WSServerHandshakeError",
    "ContentTypeError",
    "ClientPayloadError",
    "InvalidURL",
)


class ClientError(Exception):
    """Base class for client connection errors."""


class ClientResponseError(ClientError):
    """Connection error during reading response.

    request_info: instance of RequestInfo
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        if status is not None:
            self.status = status
        elif code is not None:
            self.status = code
        else:
            self.status = 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return "{}, message={!r}, url={!r}".format(
            self.status,
            self.message,
            self.request_info.real_url,
        )

    def __repr__(self) -> str:
        args = f"{self.request_info!r}, {self.history!r}"
        if self.status != 0:
            args += f", status={self.status!r}"
        if self.message != "":
            args += f", message={self.message!r}"
        if self.headers is not None:
            args += f", headers={self.headers!r}"
        return "{}({})".format(type(self).__name__, args)

    @property
    def code(self) -> int:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value


class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""


class WSServerHandshakeError(ClientResponseError):
    """WebSocket server handshake error."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""


class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""


class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )

    # OSError.__reduce__ does too much black magic
    __reduce__ = BaseException.__reduce__


class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Optional[str] = None) -> None:
        if message is None:
            message = "Server disconnected"

        self.args = (message,)
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""


class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
            self.__class__.__name__, self.expected, self.got, self.host, self.port
        )


class ClientPayloadError(ClientError):
    """Response payload error."""


class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain a host
    part."""

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        super().__init__(url)

    @property
    def url(self) -> Any:
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"


class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""


if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)


class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore
    """Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
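A usage sketch for the exception hierarchy above; fetch_json and the printed messages are illustrative, the exception classes and their attributes are the ones defined in this file:

import aiohttp

async def fetch_json(url):
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(url, raise_for_status=True) as resp:
                return await resp.json()
        except aiohttp.ContentTypeError:
            return None  # 2xx response whose body was not JSON
        except aiohttp.ClientResponseError as exc:
            print(f"HTTP {exc.status}: {exc.message}")  # 4xx/5xx via raise_for_status
        except aiohttp.ClientConnectorError as exc:
            print(f"cannot connect to {exc.host}:{exc.port}")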
251
.venv/lib/python3.9/site-packages/aiohttp/client_proto.py
Normal file
@@ -0,0 +1,251 @@
import asyncio
from contextlib import suppress
from typing import Any, Optional, Tuple

from .base_protocol import BaseProtocol
from .client_exceptions import (
    ClientOSError,
    ClientPayloadError,
    ServerDisconnectedError,
    ServerTimeoutError,
)
from .helpers import BaseTimerContext
from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader


class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        self._should_close = False

        self._payload = None
        self._skip_payload = False
        self._payload_parser = None

        self._timer = None

        self._tail = b""
        self._upgraded = False
        self._parser = None  # type: Optional[HttpResponseParser]

        self._read_timeout = None  # type: Optional[float]
        self._read_timeout_handle = None  # type: Optional[asyncio.TimerHandle]

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        if self._payload is not None and not self._payload.is_eof() or self._upgraded:
            return True

        return (
            self._should_close
            or self._upgraded
            or self.exception() is not None
            or self._payload_parser is not None
            or len(self) > 0
            or bool(self._tail)
        )

    def force_close(self) -> None:
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        if self._payload_parser is not None:
            with suppress(Exception):
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception:
                if self._payload is not None:
                    self._payload.set_exception(
                        ClientPayloadError("Response payload is not completed")
                    )

        if not self.is_eof():
            if isinstance(exc, OSError):
                exc = ClientOSError(*exc.args)
            if exc is None:
                exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            self.set_exception(exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(self, exc: BaseException) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2 ** 16
    ) -> None:
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout
        self._reschedule_timeout()

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
        )

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def _on_read_timeout(self) -> None:
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            self._payload.set_exception(exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(exc)
                    return

                self._upgraded = upgraded

                payload = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    if self._skip_payload or message.code in (204, 304):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)  # type: ignore
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) was processed
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        self.data_received(tail)
                    else:
                        self._tail = tail
1127
.venv/lib/python3.9/site-packages/aiohttp/client_reqrep.py
Normal file
File diff suppressed because it is too large
301
.venv/lib/python3.9/site-packages/aiohttp/client_ws.py
Normal file
@@ -0,0 +1,301 @@
"""WebSocket client for asyncio."""

import asyncio
from typing import Any, Optional

import async_timeout

from .client_exceptions import ClientError
from .client_reqrep import ClientResponse
from .helpers import call_later, set_result
from .http import (
    WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE,
    WebSocketError,
    WSMessage,
    WSMsgType,
)
from .http_websocket import WebSocketWriter  # WSMessage
from .streams import EofStream, FlowControlDataQueue
from .typedefs import (
    DEFAULT_JSON_DECODER,
    DEFAULT_JSON_ENCODER,
    JSONDecoder,
    JSONEncoder,
)


class ClientWebSocketResponse:
    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code = None  # type: Optional[int]
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb = None
        if heartbeat is not None:
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb = None
        self._loop = loop
        self._waiting = None  # type: Optional[asyncio.Future[bool]]
        self._exception = None  # type: Optional[BaseException]
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat, self._heartbeat, self._loop
            )

    def _send_heartbeat(self) -> None:
        if self._heartbeat is not None and not self._closed:
            # fire-and-forget a task is not perfect but maybe ok for
            # sending ping. Otherwise we need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received, self._pong_heartbeat, self._loop
            )

    def _pong_not_received(self) -> None:
        if not self._closed:
            self._closed = True
            self._close_code = 1006
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        return self._protocol

    @property
    def compress(self) -> int:
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closed:
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = 1006
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = 1006
                self._exception = exc
                self._response.close()
                return True

            if self._closing:
                self._response.close()
                return True

            while True:
                try:
                    with async_timeout.timeout(self._timeout, loop=self._loop):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = 1006
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = 1006
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = self._loop.create_future()
                try:
                    with async_timeout.timeout(
                        timeout or self._receive_timeout, loop=self._loop
                    ):
                        msg = await self._reader.read()
                        self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = 1006
                raise
            except EofStream:
                self._close_code = 1000
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = 1006
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = 1006
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return msg.data

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return msg.data

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
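A client-side usage sketch for the class above, obtained through ClientSession.ws_connect(); the echo URL is illustrative:

import aiohttp

async def echo_client():
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect("ws://127.0.0.1:8080/ws", heartbeat=30.0) as ws:
            await ws.send_str("hello")
            async for msg in ws:  # uses __aiter__/__anext__ defined above
                if msg.type == aiohttp.WSMsgType.TEXT:
                    print(msg.data)
                elif msg.type == aiohttp.WSMsgType.ERROR:
                    break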
1262
.venv/lib/python3.9/site-packages/aiohttp/connector.py
Normal file
File diff suppressed because it is too large
382
.venv/lib/python3.9/site-packages/aiohttp/cookiejar.py
Normal file
@@ -0,0 +1,382 @@
import asyncio
import datetime
import os  # noqa
import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import (  # noqa
    DefaultDict,
    Dict,
    Iterable,
    Iterator,
    Mapping,
    Optional,
    Set,
    Tuple,
    Union,
    cast,
)

from yarl import URL

from .abc import AbstractCookieJar
from .helpers import is_ip_address, next_whole_second
from .typedefs import LooseCookies, PathLike

__all__ = ("CookieJar", "DummyCookieJar")


CookieItem = Union[str, "Morsel[str]"]


class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)

    MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        loop: Optional[asyncio.AbstractEventLoop] = None
    ) -> None:
        super().__init__(loop=loop)
        self._cookies = defaultdict(
            SimpleCookie
        )  # type: DefaultDict[str, SimpleCookie[str]]
        self._host_only_cookies = set()  # type: Set[Tuple[str, str]]
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        self._next_expiration = next_whole_second()
        self._expirations = {}  # type: Dict[Tuple[str, str], datetime.datetime]
        # #4515: datetime.max may not be representable on 32-bit platforms
        self._max_time = self.MAX_TIME
        try:
            self._max_time.timestamp()
        except OverflowError:
            self._max_time = self.MAX_32BIT_TIME

    def save(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

    def clear(self) -> None:
        self._cookies.clear()
        self._host_only_cookies.clear()
        self._next_expiration = next_whole_second()
        self._expirations.clear()

    def __iter__(self) -> "Iterator[Morsel[str]]":
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        return sum(1 for i in self)

    def _do_expiration(self) -> None:
        now = datetime.datetime.now(datetime.timezone.utc)
        if self._next_expiration > now:
            return
        if not self._expirations:
            return
        next_expiration = self._max_time
        to_del = []
        cookies = self._cookies
        expirations = self._expirations
        for (domain, name), when in expirations.items():
            if when <= now:
                cookies[domain].pop(name, None)
                to_del.append((domain, name))
                self._host_only_cookies.discard((domain, name))
            else:
                next_expiration = min(next_expiration, when)
        for key in to_del:
            del expirations[key]

        try:
            self._next_expiration = next_expiration.replace(
                microsecond=0
            ) + datetime.timedelta(seconds=1)
        except OverflowError:
            self._next_expiration = self._max_time

    def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
        self._next_expiration = min(self._next_expiration, when)
        self._expirations[(domain, name)] = when

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                tmp = SimpleCookie()  # type: SimpleCookie[str]
                tmp[name] = cookie  # type: ignore
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            max_age = cookie["max-age"]
            if max_age:
                try:
                    delta_seconds = int(max_age)
                    try:
                        max_age_expiration = datetime.datetime.now(
                            datetime.timezone.utc
                        ) + datetime.timedelta(seconds=delta_seconds)
                    except OverflowError:
                        max_age_expiration = self._max_time
                    self._expire_cookie(max_age_expiration, domain, name)
                except ValueError:
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, name)
                    else:
                        cookie["expires"] = ""

            self._cookies[domain][name] = cookie

        self._do_expiration()

    def filter_cookies(
        self, request_url: URL = URL()
    ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
        """Returns this jar's cookies filtered by their attributes."""
        self._do_expiration()
        request_url = URL(request_url)
        filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        hostname = request_url.raw_host or ""
        is_not_secure = request_url.scheme not in ("https", "wss")

        for cookie in self:
            name = cookie.key
            domain = cookie["domain"]

            # Send shared cookies
            if not domain:
                filtered[name] = cookie.value
                continue

            if not self._unsafe and is_ip_address(hostname):
                continue

            if (domain, name) in self._host_only_cookies:
                if domain != hostname:
                    continue
            elif not self._is_domain_match(domain, hostname):
                continue

            if not self._is_path_match(request_url.path, cookie["path"]):
                continue

            if is_not_secure and cookie["secure"]:
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            filtered[name] = mrsl_val

        return filtered

    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @staticmethod
    def _is_path_match(req_path: str, cookie_path: str) -> bool:
        """Implements path matching adhering to RFC 6265."""
        if not req_path.startswith("/"):
            req_path = "/"

        if req_path == cookie_path:
            return True

        if not req_path.startswith(cookie_path):
            return False

        if cookie_path.endswith("/"):
            return True

        non_matching = req_path[len(cookie_path) :]

        return non_matching.startswith("/")

    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
        """Implements date string parsing adhering to RFC 6265."""
        if not date_str:
            return None

        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = [int(s) for s in time_match.groups()]
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return datetime.datetime(
            year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
        )


class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        while False:
            yield None

    def __len__(self) -> int:
        return 0

    def clear(self) -> None:
        pass

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        pass

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        return SimpleCookie()
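A persistence sketch for the CookieJar above, using the pickle-based save()/load() methods it defines; the paths and URL are illustrative:

import asyncio
import aiohttp

async def login_and_save():
    jar = aiohttp.CookieJar(unsafe=True)  # unsafe=True also accepts cookies from bare IPs
    async with aiohttp.ClientSession(cookie_jar=jar) as session:
        await session.get("http://127.0.0.1:8080/login")
    jar.save("cookies.pickle")  # later: jar.load("cookies.pickle")

asyncio.run(login_and_save())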
170
.venv/lib/python3.9/site-packages/aiohttp/formdata.py
Normal file
@@ -0,0 +1,170 @@
import io
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode

from multidict import MultiDict, MultiDictProxy

from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload

__all__ = ("FormData",)


class FormData:
    """Helper class for multipart/form-data and
    application/x-www-form-urlencoded body generation."""

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        self._writer = multipart.MultipartWriter("form-data")
        self._fields = []  # type: List[Any]
        self._is_multipart = False
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None
    ) -> None:

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            if filename is None and content_transfer_encoding is None:
                filename = name

        type_options = MultiDict({"name": name})  # type: MultiDict[str]
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't like body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
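A usage sketch for FormData above; the field names, file and URL are illustrative:

import aiohttp

async def upload(url):
    form = aiohttp.FormData()
    form.add_field("user", "alice")
    form.add_field(
        "avatar",
        open("avatar.png", "rb"),  # an io.IOBase value switches to multipart
        filename="avatar.png",
        content_type="image/png",
    )
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=form) as resp:
            return resp.status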
72
.venv/lib/python3.9/site-packages/aiohttp/frozenlist.py
Normal file
@@ -0,0 +1,72 @@
from collections.abc import MutableSequence
from functools import total_ordering

from .helpers import NO_EXTENSIONS


@total_ordering
class FrozenList(MutableSequence):

    __slots__ = ("_frozen", "_items")

    def __init__(self, items=None):
        self._frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    @property
    def frozen(self):
        return self._frozen

    def freeze(self):
        self._frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")
        self._items[index] = value

    def __delitem__(self, index):
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")
        del self._items[index]

    def __len__(self):
        return self._items.__len__()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __eq__(self, other):
        return list(self) == other

    def __le__(self, other):
        return list(self) <= other

    def insert(self, pos, item):
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")
        self._items.insert(pos, item)

    def __repr__(self):
        return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"


PyFrozenList = FrozenList

try:
    from aiohttp._frozenlist import FrozenList as CFrozenList  # type: ignore

    if not NO_EXTENSIONS:
        FrozenList = CFrozenList  # type: ignore
except ImportError:  # pragma: no cover
    pass
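A minimal usage sketch for the FrozenList above (an illustrative addition, not part of the vendored file): the list is mutable until freeze() is called, after which any mutation raises RuntimeError.

    from aiohttp.frozenlist import FrozenList

    fl = FrozenList([1, 2])
    fl.append(3)        # still mutable
    fl.freeze()
    assert fl.frozen
    try:
        fl.append(4)    # any mutation now fails
    except RuntimeError:
        pass            # "Cannot modify frozen list."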
46
.venv/lib/python3.9/site-packages/aiohttp/frozenlist.pyi
Normal file
@@ -0,0 +1,46 @@
from typing import (
    Generic,
    Iterable,
    Iterator,
    List,
    MutableSequence,
    Optional,
    TypeVar,
    Union,
    overload,
)

_T = TypeVar("_T")
_Arg = Union[List[_T], Iterable[_T]]

class FrozenList(MutableSequence[_T], Generic[_T]):
    def __init__(self, items: Optional[_Arg[_T]] = ...) -> None: ...
    @property
    def frozen(self) -> bool: ...
    def freeze(self) -> None: ...
    @overload
    def __getitem__(self, i: int) -> _T: ...
    @overload
    def __getitem__(self, s: slice) -> FrozenList[_T]: ...
    @overload
    def __setitem__(self, i: int, o: _T) -> None: ...
    @overload
    def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
    @overload
    def __delitem__(self, i: int) -> None: ...
    @overload
    def __delitem__(self, i: slice) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __reversed__(self) -> Iterator[_T]: ...
    def __eq__(self, other: object) -> bool: ...
    def __le__(self, other: FrozenList[_T]) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def __lt__(self, other: FrozenList[_T]) -> bool: ...
    def __ge__(self, other: FrozenList[_T]) -> bool: ...
    def __gt__(self, other: FrozenList[_T]) -> bool: ...
    def insert(self, pos: int, item: _T) -> None: ...
    def __repr__(self) -> str: ...

# types for C accelerators are the same
CFrozenList = PyFrozenList = FrozenList
108
.venv/lib/python3.9/site-packages/aiohttp/hdrs.py
Normal file
@@ -0,0 +1,108 @@
"""HTTP Headers constants."""

# After changing the file content call ./tools/gen.py
# to regenerate the headers parser

from multidict import istr

METH_ANY = "*"
METH_CONNECT = "CONNECT"
METH_HEAD = "HEAD"
METH_GET = "GET"
METH_DELETE = "DELETE"
METH_OPTIONS = "OPTIONS"
METH_PATCH = "PATCH"
METH_POST = "POST"
METH_PUT = "PUT"
METH_TRACE = "TRACE"

METH_ALL = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}


ACCEPT = istr("Accept")
ACCEPT_CHARSET = istr("Accept-Charset")
ACCEPT_ENCODING = istr("Accept-Encoding")
ACCEPT_LANGUAGE = istr("Accept-Language")
ACCEPT_RANGES = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD = istr("Access-Control-Request-Method")
AGE = istr("Age")
ALLOW = istr("Allow")
AUTHORIZATION = istr("Authorization")
CACHE_CONTROL = istr("Cache-Control")
CONNECTION = istr("Connection")
CONTENT_DISPOSITION = istr("Content-Disposition")
CONTENT_ENCODING = istr("Content-Encoding")
CONTENT_LANGUAGE = istr("Content-Language")
CONTENT_LENGTH = istr("Content-Length")
CONTENT_LOCATION = istr("Content-Location")
CONTENT_MD5 = istr("Content-MD5")
CONTENT_RANGE = istr("Content-Range")
CONTENT_TRANSFER_ENCODING = istr("Content-Transfer-Encoding")
CONTENT_TYPE = istr("Content-Type")
COOKIE = istr("Cookie")
DATE = istr("Date")
DESTINATION = istr("Destination")
DIGEST = istr("Digest")
ETAG = istr("Etag")
EXPECT = istr("Expect")
EXPIRES = istr("Expires")
FORWARDED = istr("Forwarded")
FROM = istr("From")
HOST = istr("Host")
IF_MATCH = istr("If-Match")
IF_MODIFIED_SINCE = istr("If-Modified-Since")
IF_NONE_MATCH = istr("If-None-Match")
IF_RANGE = istr("If-Range")
IF_UNMODIFIED_SINCE = istr("If-Unmodified-Since")
KEEP_ALIVE = istr("Keep-Alive")
LAST_EVENT_ID = istr("Last-Event-ID")
LAST_MODIFIED = istr("Last-Modified")
LINK = istr("Link")
LOCATION = istr("Location")
MAX_FORWARDS = istr("Max-Forwards")
ORIGIN = istr("Origin")
PRAGMA = istr("Pragma")
PROXY_AUTHENTICATE = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION = istr("Proxy-Authorization")
RANGE = istr("Range")
REFERER = istr("Referer")
RETRY_AFTER = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1 = istr("Sec-WebSocket-Key1")
SERVER = istr("Server")
SET_COOKIE = istr("Set-Cookie")
TE = istr("TE")
TRAILER = istr("Trailer")
TRANSFER_ENCODING = istr("Transfer-Encoding")
UPGRADE = istr("Upgrade")
URI = istr("URI")
USER_AGENT = istr("User-Agent")
VARY = istr("Vary")
VIA = istr("Via")
WANT_DIGEST = istr("Want-Digest")
WARNING = istr("Warning")
WWW_AUTHENTICATE = istr("WWW-Authenticate")
X_FORWARDED_FOR = istr("X-Forwarded-For")
X_FORWARDED_HOST = istr("X-Forwarded-Host")
X_FORWARDED_PROTO = istr("X-Forwarded-Proto")
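A short sketch of how these istr constants are typically used (an illustrative addition, not part of the vendored file): istr keys make header lookups case-insensitive in multidict containers.

    from multidict import CIMultiDict
    from aiohttp import hdrs

    headers = CIMultiDict({hdrs.CONTENT_TYPE: "text/html"})
    assert headers["content-type"] == "text/html"   # case-insensitive match
    assert hdrs.METH_GET in hdrs.METH_ALL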
780
.venv/lib/python3.9/site-packages/aiohttp/helpers.py
Normal file
@@ -0,0 +1,780 @@
"""Various helper functions"""

import asyncio
import base64
import binascii
import cgi
import datetime
import functools
import inspect
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Pattern,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
)
from urllib.parse import quote
from urllib.request import getproxies

import async_timeout
import attr
from multidict import MultiDict, MultiDictProxy
from typing_extensions import Protocol
from yarl import URL

from . import hdrs
from .log import client_logger, internal_logger
from .typedefs import PathLike  # noqa

__all__ = ("BasicAuth", "ChainMapProxy")

PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)

if not PY_37:
    import idna_ssl

    idna_ssl.patch_match_hostname()

try:
    from typing import ContextManager
except ImportError:
    from typing_extensions import ContextManager


def all_tasks(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Set["asyncio.Task[Any]"]:
    tasks = list(asyncio.Task.all_tasks(loop))
    return {t for t in tasks if not t.done()}


if PY_37:
    all_tasks = getattr(asyncio, "all_tasks")


_T = TypeVar("_T")
_S = TypeVar("_S")


sentinel = object()  # type: Any
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))  # type: bool

# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG = getattr(sys.flags, "dev_mode", False) or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)  # type: bool


CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS


class noop:
    def __await__(self) -> Generator[None, None, None]:
        yield


class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        try:
            auth_type, encoded_credentials = auth_header.split(" ", 1)
        except ValueError:
            raise ValueError("Could not parse authorization header.")

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        try:
            # RFC 2617 HTTP Authentication
            # https://www.ietf.org/rfc/rfc2617.txt
            # the colon must be present, but the username and password may be
            # otherwise blank.
            username, password = decoded.split(":", 1)
        except ValueError:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        creds = (f"{self.login}:{self.password}").encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
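# A minimal round-trip sketch for BasicAuth (an editorial illustration, not
# part of the vendored module):
#
#     auth = BasicAuth("user", "secret")
#     header = auth.encode()                  # 'Basic dXNlcjpzZWNyZXQ='
#     assert BasicAuth.decode(header) == auth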
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    auth = BasicAuth.from_url(url)
    if auth is None:
        return url, None
    else:
        return url.with_user(None), auth


def netrc_from_env() -> Optional[netrc.netrc]:
    """Attempt to load the netrc file from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        netrc_path = home_dir / (
            "_netrc" if platform.system() == "Windows" else ".netrc"
        )

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_path.is_file():
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    proxy: URL
    proxy_auth: Optional[BasicAuth]


def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {k: URL(v) for k, v in getproxies().items() if k in ("http", "https")}
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme == "https":
            client_logger.warning("HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret


def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    if PY_37:
        return asyncio.current_task(loop=loop)
    else:
        return asyncio.Task.current_task(loop=loop)


def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop


def isasyncgenfunction(obj: Any) -> bool:
    func = getattr(inspect, "isasyncgenfunction", None)
    if func is not None:
        return func(obj)
    else:
        return False


@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    type: str
    subtype: str
    suffix: str
    parameters: "MultiDictProxy[str]"


@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    parts = mimetype.split(";")
    params = MultiDict()  # type: MultiDict[str]
    for item in parts[1:]:
        if not item:
            continue
        key, value = cast(
            Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
        )
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    mtype, stype = (
        cast(Tuple[str, str], fulltype.split("/", 1))
        if "/" in fulltype
        else (fulltype, "")
    )
    stype, suffix = (
        cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
    )

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
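# An illustrative call showing suffix splitting in parse_mimetype (editorial
# addition, not part of the vendored module):
#
#     mt = parse_mimetype("application/hal+json; charset=utf-8")
#     assert (mt.type, mt.subtype, mt.suffix) == ("application", "hal", "json")
#     assert mt.parameters["charset"] == "utf-8"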
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    name = getattr(obj, "name", None)
    if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
        return Path(name).name
    return default


def content_disposition_header(
    disptype: str, quote_fields: bool = True, **params: str
) -> str:
    """Sets ``Content-Disposition`` header.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    params is a dict with disposition params.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter {!r}={!r}".format(key, val)
                )
            qval = quote(val, "") if quote_fields else val
            lparams.append((key, '"%s"' % qval))
            if key == "filename":
                lparams.append(("filename*", "utf-8''" + qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
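# An illustrative call (editorial addition, not part of the vendored module);
# note the RFC 5987 filename* parameter emitted alongside filename:
#
#     cd = content_disposition_header("attachment", filename="report.pdf")
#     # 'attachment; filename="report.pdf"; filename*=utf-8''report.pdf'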
class _TSelf(Protocol):
    _cache: Dict[str, Any]


class reify(Generic[_T]):
    """Use as a class method decorator. It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.

    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf, value: _T) -> None:
        raise AttributeError("reified property is read-only")


reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore
except ImportError:
    pass
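# A minimal usage sketch for reify (editorial illustration, not part of the
# vendored module); the decorated class must provide a `_cache` dict:
#
#     class Config:
#         def __init__(self):
#             self._cache = {}
#
#         @reify
#         def setting(self):
#             return expensive_computation()  # hypothetical helper
#
#     cfg = Config()
#     cfg.setting   # computed once, then served from cfg._cache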
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)


def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    if host is None:
        return False
    if isinstance(host, str):
        return bool(regex.match(host))
    elif isinstance(host, (bytes, bytearray, memoryview)):
        return bool(regexb.match(host))
    else:
        raise TypeError("{} [{}] is not a str or bytes".format(host, type(host)))


is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    return is_ipv4_address(host) or is_ipv6_address(host)
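# Illustrative checks (editorial addition, not part of the vendored module):
#
#     assert is_ip_address("127.0.0.1")        # IPv4, str input
#     assert is_ip_address(b"::1")             # IPv6, bytes input
#     assert not is_ip_address("example.com")  # hostnames do not match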
def next_whole_second() -> datetime.datetime:
    """Return current time rounded up to the next whole second."""
    return datetime.datetime.now(datetime.timezone.utc).replace(
        microsecond=0
    ) + datetime.timedelta(seconds=0)


_cached_current_datetime = None  # type: Optional[int]
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        _monthname = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            _weekdayname[wd],
            day,
            _monthname[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime
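# Illustrative output (editorial addition, not part of the vendored module);
# the exact value depends on the current time:
#
#     rfc822_formatted_time()   # e.g. 'Mon, 01 Mar 2021 12:00:00 GMT'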
def _weakref_handle(info):  # type: ignore
    ref, name = info
    ob = ref()
    if ob is not None:
        with suppress(Exception):
            getattr(ob, name)()


def weakref_handle(ob, name, timeout, loop):  # type: ignore
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout >= 5:
            when = ceil(when)

        return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))


def call_later(cb, timeout, loop):  # type: ignore
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout > 5:
            when = ceil(when)
        return loop.call_at(when, cb)


class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._callbacks = (
            []
        )  # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            if timeout >= 5:
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()


class BaseTimerContext(ContextManager["BaseTimerContext"]):
    pass


class TimerNoop(BaseTimerContext):
    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return


class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._tasks = []  # type: List[asyncio.Task[Any]]
        self._cancelled = False

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used inside a task"
            )

        if self._cancelled:
            task.cancel()
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True


class CeilTimeout(async_timeout.timeout):
    def __enter__(self) -> async_timeout.timeout:
        if self._timeout is not None:
            self._task = current_task(loop=self._loop)
            if self._task is None:
                raise RuntimeError(
                    "Timeout context manager should be used inside a task"
                )
            now = self._loop.time()
            delay = self._timeout
            when = now + delay
            if delay > 5:
                when = ceil(when)
            self._cancel_handler = self._loop.call_at(when, self._cancel_task)
        return self


class HeadersMixin:

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _content_type = None  # type: Optional[str]
    _content_dict = None  # type: Optional[Dict[str, str]]
    _stored_content_type = sentinel

    def _parse_content_type(self, raw: str) -> None:
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            self._content_type, self._content_dict = cgi.parse_header(raw)

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)  # type: ignore

        if content_length is not None:
            return int(content_length)
        else:
            return None


def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    if not fut.done():
        fut.set_result(result)


def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    if not fut.done():
        fut.set_exception(exc)


class ChainMapProxy(Mapping[str, Any]):
    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    def __getitem__(self, key: str) -> Any:
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    def get(self, key: str, default: Any = None) -> Any:
        return self[key] if key in self else default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))  # type: ignore

    def __iter__(self) -> Iterator[str]:
        d = {}  # type: Dict[str, Any]
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
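A minimal usage sketch for the ChainMapProxy above (an illustrative addition, not part of the vendored file): lookups try each mapping in order, so earlier mappings override later ones.

    from aiohttp.helpers import ChainMapProxy

    defaults = {"timeout": 60, "retries": 3}
    overrides = {"timeout": 10}
    cfg = ChainMapProxy([overrides, defaults])
    assert cfg["timeout"] == 10     # first mapping wins
    assert cfg["retries"] == 3      # falls through to defaults
    assert len(cfg) == 2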
72
.venv/lib/python3.9/site-packages/aiohttp/http.py
Normal file
@@ -0,0 +1,72 @@
import http.server
import sys
from typing import Mapping, Tuple

from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import (
    HeadersParser as HeadersParser,
    HttpParser as HttpParser,
    HttpRequestParser as HttpRequestParser,
    HttpResponseParser as HttpResponseParser,
    RawRequestMessage as RawRequestMessage,
    RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
    WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
    WS_KEY as WS_KEY,
    WebSocketError as WebSocketError,
    WebSocketReader as WebSocketReader,
    WebSocketWriter as WebSocketWriter,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
    ws_ext_gen as ws_ext_gen,
    ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    StreamWriter as StreamWriter,
)

__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)  # type: str

RESPONSES = (
    http.server.BaseHTTPRequestHandler.responses
)  # type: Mapping[int, Tuple[str, str]]
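An illustrative look at the two module constants defined above (an editorial addition, not part of the vendored file):

    from aiohttp import http

    http.SERVER_SOFTWARE          # e.g. 'Python/3.9 aiohttp/3.7.4.post0'
    assert http.RESPONSES[404][0] == "Not Found"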
105
.venv/lib/python3.9/site-packages/aiohttp/http_exceptions.py
Normal file
@@ -0,0 +1,105 @@
"""Low-level http related exceptions."""


from typing import Optional, Union

from .typedefs import _CIMultiDict

__all__ = ("HttpProcessingError",)


class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        if code is not None:
            self.code = code
        self.headers = headers
        self.message = message

    def __str__(self) -> str:
        return f"{self.code}, message={self.message!r}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self}>"


class BadHttpMessage(HttpProcessingError):

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        self.args = (message,)


class HttpBadRequest(BadHttpMessage):

    code = 400
    message = "Bad Request"


class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy content length header."""


class LineTooLong(BadHttpMessage):
    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        self.args = (line, limit, actual_size)


class InvalidHeader(BadHttpMessage):
    def __init__(self, hdr: Union[bytes, str]) -> None:
        if isinstance(hdr, bytes):
            hdr = hdr.decode("utf-8", "surrogateescape")
        super().__init__(f"Invalid HTTP Header: {hdr}")
        self.hdr = hdr
        self.args = (hdr,)


class BadStatusLine(BadHttpMessage):
    def __init__(self, line: str = "") -> None:
        if not isinstance(line, str):
            line = repr(line)
        super().__init__(f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line


class InvalidURLError(BadHttpMessage):
    pass
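A minimal sketch of raising and formatting these exceptions (an illustrative addition, not part of the vendored file):

    from aiohttp.http_exceptions import HttpProcessingError

    err = HttpProcessingError(code=400, message="Bad Request")
    assert str(err) == "400, message='Bad Request'"
    assert repr(err) == "<HttpProcessingError: 400, message='Bad Request'>"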
901
.venv/lib/python3.9/site-packages/aiohttp/http_parser.py
Normal file
@@ -0,0 +1,901 @@
import abc
import asyncio
import collections
import re
import string
import zlib
from enum import IntEnum
from typing import Any, List, Optional, Tuple, Type, Union

from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL

from . import hdrs
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS, BaseTimerContext
from .http_exceptions import (
    BadStatusLine,
    ContentEncodingError,
    ContentLengthError,
    InvalidHeader,
    LineTooLong,
    TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .log import internal_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders

try:
    import brotli

    HAS_BROTLI = True
except ImportError:  # pragma: no cover
    HAS_BROTLI = False


__all__ = (
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
)

ASCIISET = set(string.printable)

# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
#
#     method = token
#     tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#             "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
#     token = 1*tchar
METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE = re.compile(r"HTTP/(\d+).(\d+)")
HDRRE = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")

RawRequestMessage = collections.namedtuple(
    "RawRequestMessage",
    [
        "method",
        "path",
        "version",
        "headers",
        "raw_headers",
        "should_close",
        "compression",
        "upgrade",
        "chunked",
        "url",
    ],
)

RawResponseMessage = collections.namedtuple(
    "RawResponseMessage",
    [
        "version",
        "code",
        "reason",
        "headers",
        "raw_headers",
        "should_close",
        "compression",
        "upgrade",
        "chunked",
    ],
)


class ParseState(IntEnum):

    PARSE_NONE = 0
    PARSE_LENGTH = 1
    PARSE_CHUNKED = 2
    PARSE_UNTIL_EOF = 3


class ChunkState(IntEnum):
    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4


class HeadersParser:
    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        headers = CIMultiDict()  # type: CIMultiDict[str]
        raw_headers = []

        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            bname = bname.strip(b" \t")
            bvalue = bvalue.lstrip()
            if HDRRE.search(bname):
                raise InvalidHeader(bname)
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "xmlcharrefreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = line and line[0] in (32, 9)  # (' ', '\t')

            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "xmlcharrefreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "xmlcharrefreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip()
            name = bname.decode("utf-8", "surrogateescape")
            value = bvalue.decode("utf-8", "surrogateescape")

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
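# A minimal usage sketch for HeadersParser (editorial illustration, not part
# of the vendored module). parse_headers() skips lines[0] (the start line)
# and stops at the first empty line:
#
#     parser = HeadersParser()
#     headers, raw = parser.parse_headers(
#         [b"GET / HTTP/1.1", b"Host: example.com", b"Accept: */*", b""]
#     )
#     assert headers["Host"] == "example.com"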
class HttpParser(abc.ABC):
    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2 ** 16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.readall = readall
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        self._lines = []  # type: List[bytes]
        self._tail = b""
        self._upgraded = False
        self._payload = None
        self._payload_parser = None  # type: Optional[HttpPayloadParser]
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> Any:
        pass

    def feed_eof(self) -> Any:
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                try:
                    return self.parse_message(self._lines)
                except Exception:
                    return None

    def feed_data(
        self,
        data: bytes,
        SEP: bytes = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Any], bool, bytes]:

        messages = []

        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + 2
                    continue

                if pos >= start_pos:
                    # line found
                    self._lines.append(data[start_pos:pos])
                    start_pos = pos + 2

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg = self.parse_message(self._lines)
                        finally:
                            self._lines.clear()

                        # payload length
                        length = msg.headers.get(CONTENT_LENGTH)
                        if length is not None:
                            try:
                                length = int(length)
                            except ValueError:
                                raise InvalidHeader(CONTENT_LENGTH)
                            if length < 0:
                                raise InvalidHeader(CONTENT_LENGTH)

                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade

                        method = getattr(msg, "method", self.method)

                        assert self.protocol is not None
                        # calculate payload
                        if (
                            (length is not None and length > 0)
                            or msg.chunked
                            and not msg.upgrade
                        ):
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                readall=self.readall,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                readall=True,
                                auto_decompress=self._auto_decompress,
                            )
                        else:
                            if (
                                getattr(msg, "code", 100) >= 199
                                and length is None
                                and self.read_until_eof
                            ):
                                payload = StreamReader(
                                    self.protocol,
                                    timer=self.timer,
                                    loop=loop,
                                    limit=self._limit,
                                )
                                payload_parser = HttpPayloadParser(
                                    payload,
                                    length=length,
                                    chunked=msg.chunked,
                                    method=method,
                                    compression=msg.compression,
                                    code=self.code,
                                    readall=True,
                                    response_with_body=self.response_with_body,
                                    auto_decompress=self._auto_decompress,
                                )
                                if not payload_parser.done:
                                    self._payload_parser = payload_parser
                            else:
                                payload = EMPTY_PAYLOAD  # type: ignore

                        messages.append((msg, payload))
                else:
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:])
                except BaseException as exc:
                    if self.payload_exception is not None:
                        self._payload_parser.payload.set_exception(
                            self.payload_exception(str(exc))
                        )
                    else:
                        self._payload_parser.payload.set_exception(exc)

                    eof = True
                    data = b""

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            elif v == "upgrade":
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te and "chunked" in te.lower():
            chunked = True

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val


class HttpRequestParser(HttpParser):
    """Read request status line.

    An .http_exceptions.BadStatusLine exception may be raised on errors
    in the status line. Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> Any:
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(None, 2)
        except ValueError:
            raise BadStatusLine(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        path_part, _hash_separator, url_fragment = path.partition("#")
        path_part, _question_mark_separator, qs_part = path_part.partition("?")

        # method
        if not METHRE.match(method):
            raise BadStatusLine(method)

        # version
        try:
            if version.startswith("HTTP/"):
                n1, n2 = version[5:].split(".", 1)
                version_o = HttpVersion(int(n1), int(n2))
            else:
                raise BadStatusLine(version)
        except Exception:
            raise BadStatusLine(version)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 closes by default
                close = True
            else:  # HTTP 1.1 keeps the connection alive by default
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise the same HTTP Request-Line input
            # NOTE: would produce different `yarl.URL()` objects
            URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            ),
        )
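# An illustrative parse of a bare request head via parse_message() (editorial
# addition, not part of the vendored module); lines carry no \r\n terminators
# and end with an empty line:
#
#     parser = HttpRequestParser()
#     msg = parser.parse_message(
#         [b"GET /path?q=1 HTTP/1.1", b"Host: example.com", b""]
#     )
#     assert msg.method == "GET"
#     assert str(msg.url) == "/path?q=1"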
class HttpResponseParser(HttpParser):
|
||||
"""Read response status line and headers.
|
||||
|
||||
BadStatusLine could be raised in case of any errors in status line.
|
||||
Returns RawResponseMessage"""
|
||||
|
||||
def parse_message(self, lines: List[bytes]) -> Any:
|
||||
line = lines[0].decode("utf-8", "surrogateescape")
|
||||
try:
|
||||
version, status = line.split(None, 1)
|
||||
except ValueError:
|
||||
raise BadStatusLine(line) from None
|
||||
|
||||
try:
|
||||
status, reason = status.split(None, 1)
|
||||
except ValueError:
|
||||
reason = ""
|
||||
|
||||
if len(reason) > self.max_line_size:
|
||||
raise LineTooLong(
|
||||
"Status line is too long", str(self.max_line_size), str(len(reason))
|
||||
)
|
||||
|
||||
# version
|
||||
match = VERSRE.match(version)
|
||||
if match is None:
|
||||
raise BadStatusLine(line)
|
||||
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
|
||||
|
||||
# The status code is a three-digit number
|
||||
try:
|
||||
status_i = int(status)
|
||||
except ValueError:
|
||||
raise BadStatusLine(line) from None
|
||||
|
||||
if status_i > 999:
|
||||
raise BadStatusLine(line)
|
||||
|
||||
# read headers
|
||||
(
|
||||
headers,
|
||||
raw_headers,
|
||||
close,
|
||||
compression,
|
||||
upgrade,
|
||||
chunked,
|
||||
) = self.parse_headers(lines)
|
||||
|
||||
if close is None:
|
||||
close = version_o <= HttpVersion10
|
||||
|
||||
return RawResponseMessage(
|
||||
version_o,
|
||||
status_i,
|
||||
reason.strip(),
|
||||
headers,
|
||||
raw_headers,
|
||||
close,
|
||||
compression,
|
||||
upgrade,
|
||||
chunked,
|
||||
)
|
||||
|
||||
|
||||
class HttpPayloadParser:
|
||||
def __init__(
|
||||
self,
|
||||
payload: StreamReader,
|
||||
length: Optional[int] = None,
|
||||
chunked: bool = False,
|
||||
compression: Optional[str] = None,
|
||||
code: Optional[int] = None,
|
||||
method: Optional[str] = None,
|
||||
readall: bool = False,
|
||||
response_with_body: bool = True,
|
||||
auto_decompress: bool = True,
|
||||
) -> None:
|
||||
self._length = 0
|
||||
self._type = ParseState.PARSE_NONE
|
||||
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
|
||||
self._chunk_size = 0
|
||||
self._chunk_tail = b""
|
||||
self._auto_decompress = auto_decompress
|
||||
self.done = False
|
||||
|
||||
# payload decompression wrapper
|
||||
if response_with_body and compression and self._auto_decompress:
|
||||
real_payload = DeflateBuffer(
|
||||
payload, compression
|
||||
) # type: Union[StreamReader, DeflateBuffer]
|
||||
else:
|
||||
real_payload = payload
|
||||
|
||||
# payload parser
|
||||
if not response_with_body:
|
||||
# don't parse payload if it's not expected to be received
|
||||
self._type = ParseState.PARSE_NONE
|
||||
real_payload.feed_eof()
|
||||
self.done = True
|
||||
|
||||
elif chunked:
|
||||
self._type = ParseState.PARSE_CHUNKED
|
||||
elif length is not None:
|
||||
self._type = ParseState.PARSE_LENGTH
|
||||
self._length = length
|
||||
if self._length == 0:
|
||||
real_payload.feed_eof()
|
||||
self.done = True
|
||||
else:
|
||||
if readall and code != 204:
|
||||
self._type = ParseState.PARSE_UNTIL_EOF
|
||||
elif method in ("PUT", "POST"):
|
||||
internal_logger.warning( # pragma: no cover
|
||||
"Content-Length or Transfer-Encoding header is required"
|
||||
)
|
||||
self._type = ParseState.PARSE_NONE
|
||||
real_payload.feed_eof()
|
||||
self.done = True
|
||||
|
||||
self.payload = real_payload

    def feed_eof(self) -> None:
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data to satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data to satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                        else:
                            size_b = chunk[:pos]

                        try:
                            size = int(bytes(size_b), 16)
                        except ValueError:
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            self.payload.set_exception(exc)
                            raise exc from None

                        chunk = chunk[pos + 2 :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if chunk[:2] == SEP:
                        chunk = chunk[2:]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if the stream does not contain a trailer, after 0\r\n
                # we should get another \r\n; otherwise
                # trailers need to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[:2]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[2:]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + 2 :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""


class DeflateBuffer:
    """Decompress a stream and feed the data into the specified stream."""

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        self.out = out
        self.size = 0
        self.encoding = encoding
        self._started_decoding = False

        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `brotlipy`"
                )
            self.decompressor = brotli.Decompressor()
        else:
            zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
            self.decompressor = zlib.decompressobj(wbits=zlib_mode)

    def set_exception(self, exc: BaseException) -> None:
        self.out.set_exception(exc)

    def feed_data(self, chunk: bytes, size: int) -> None:
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = window size
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)

        try:
            chunk = self.decompressor.decompress(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
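    # Illustrative note (not part of aiohttp): the first-byte check above
    # distinguishes a real zlib stream from raw deflate. A zlib stream
    # typically starts with 0x78 (0x78 & 0xF == 8, CM = deflate), e.g.:
    #
    #   zlib.compress(b"x")[0] & 0xF                  # == 8, zlib wrapper
    #   zlib.compressobj(wbits=-15).compress(b"x")    # raw deflate, no wrapper
    #
    # Servers that send raw deflate despite "Content-Encoding: deflate" are
    # why the decoder falls back to wbits=-zlib.MAX_WBITS.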


HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        from ._http_parser import (  # type: ignore
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass

698
.venv/lib/python3.9/site-packages/aiohttp/http_websocket.py
Normal file
@@ -0,0 +1,698 @@
"""WebSocket protocol versions 13 and 8."""

import asyncio
import collections
import json
import random
import re
import sys
import zlib
from enum import IntEnum
from struct import Struct
from typing import Any, Callable, List, Optional, Tuple, Union

from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
from .streams import DataQueue

__all__ = (
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


class WSCloseCode(IntEnum):
    OK = 1000
    GOING_AWAY = 1001
    PROTOCOL_ERROR = 1002
    UNSUPPORTED_DATA = 1003
    INVALID_TEXT = 1007
    POLICY_VIOLATION = 1008
    MESSAGE_TOO_BIG = 1009
    MANDATORY_EXTENSION = 1010
    INTERNAL_ERROR = 1011
    SERVICE_RESTART = 1012
    TRY_AGAIN_LATER = 1013


ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode}


class WSMsgType(IntEnum):
    # websocket spec types
    CONTINUATION = 0x0
    TEXT = 0x1
    BINARY = 0x2
    PING = 0x9
    PONG = 0xA
    CLOSE = 0x8

    # aiohttp specific types
    CLOSING = 0x100
    CLOSED = 0x101
    ERROR = 0x102

    text = TEXT
    binary = BINARY
    ping = PING
    pong = PONG
    close = CLOSE
    closing = CLOSING
    closed = CLOSED
    error = ERROR


WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
MSG_SIZE = 2 ** 14
DEFAULT_LIMIT = 2 ** 16


_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])


class WSMessage(_WSMessageBase):
    def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
        """Return parsed JSON data.

        .. versionadded:: 0.22
        """
        return loads(self.data)
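    # Usage sketch (illustrative, not part of aiohttp):
    #
    #   msg = WSMessage(WSMsgType.TEXT, '{"op": "ping"}', "")
    #   msg.json()                          # -> {'op': 'ping'}
    #   msg.json(loads=my_custom_loads)     # plug in a different decoder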


WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)


class WebSocketError(Exception):
    """WebSocket protocol parser error."""

    def __init__(self, code: int, message: str) -> None:
        self.code = code
        super().__init__(code, message)

    def __str__(self) -> str:
        return self.args[1]


class WSHandshakeError(Exception):
    """WebSocket protocol handshake error."""


native_byteorder = sys.byteorder


# Used by _websocket_mask_python
_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]


def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
    """Websocket masking function.

    `mask` is a `bytes` object of length 4; `data` is a `bytearray`
    object of any length. The contents of `data` are masked with `mask`,
    as specified in section 5.3 of RFC 6455.

    Note that this function mutates the `data` argument.

    This pure-python implementation may be replaced by an optimized
    version when available.

    """
    assert isinstance(data, bytearray), data
    assert len(mask) == 4, mask

    if data:
        a, b, c, d = (_XOR_TABLE[n] for n in mask)
        data[::4] = data[::4].translate(a)
        data[1::4] = data[1::4].translate(b)
        data[2::4] = data[2::4].translate(c)
        data[3::4] = data[3::4].translate(d)
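# Round-trip sketch (illustrative, not part of aiohttp): XOR masking is its
# own inverse, so applying the same 4-byte mask twice restores the payload.
#
#   buf = bytearray(b"hello websocket")
#   _websocket_mask_python(b"\x01\x02\x03\x04", buf)   # mask in place
#   _websocket_mask_python(b"\x01\x02\x03\x04", buf)   # unmask
#   assert bytes(buf) == b"hello websocket"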


if NO_EXTENSIONS:  # pragma: no cover
    _websocket_mask = _websocket_mask_python
else:
    try:
        from ._websocket import _websocket_mask_cython  # type: ignore

        _websocket_mask = _websocket_mask_cython
    except ImportError:  # pragma: no cover
        _websocket_mask = _websocket_mask_python

_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])


_WS_EXT_RE = re.compile(
    r"^(?:;\s*(?:"
    r"(server_no_context_takeover)|"
    r"(client_no_context_takeover)|"
    r"(server_max_window_bits(?:=(\d+))?)|"
    r"(client_max_window_bits(?:=(\d+))?)))*$"
)

_WS_EXT_RE_SPLIT = re.compile(r"permessage-deflate([^,]+)?")


def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # Return compress = 15 when `permessage-deflate` has no parameters
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # The server never fails to detect a compress handshake.
                # The server does not need to send max wbits to the client.
                if match.group(4):
                    compress = int(match.group(4))
                    # Group 3 must match if group 4 matches.
                    # zlib does not support wbits=8, so if the
                    # window size is unsupported,
                    # CONTINUE to the next extension.
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex groups 5 & 6 for client_max_window_bits
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group 5 must match if group 6 matches.
                    # zlib does not support wbits=8, so if the
                    # window size is unsupported,
                    # FAIL the parse progress.
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex groups 5 & 6 for client_max_window_bits
                break
        # Fail if we are the client and the extension didn't match
        elif not isserver:
            raise WSHandshakeError(
                "Extension for deflate not supported " + ext.group(1)
            )

    return compress, notakeover
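# Example (illustrative, not part of aiohttp): parsing a typical negotiated
# Sec-WebSocket-Extensions header value.
#
#   ws_ext_parse("permessage-deflate; server_max_window_bits=10", isserver=True)
#   # -> (10, False): 10-bit window, context takeover allowed
#   ws_ext_parse("permessage-deflate; client_no_context_takeover")
#   # -> (15, True) on the client side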


def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    # client_notakeover=False is not used for the server
    # zlib does not support wbits=8
    if compress < 9 or compress > 15:
        raise ValueError(
            "Compress wbits must be between 9 and 15, zlib does not support wbits=8"
        )
    enabledext = ["permessage-deflate"]
    if not isserver:
        enabledext.append("client_max_window_bits")

    if compress < 15:
        enabledext.append("server_max_window_bits=" + str(compress))
    if server_notakeover:
        enabledext.append("server_no_context_takeover")
    # if client_notakeover:
    #     enabledext.append('client_no_context_takeover')
    return "; ".join(enabledext)


class WSParserState(IntEnum):
    READ_HEADER = 1
    READ_PAYLOAD_LENGTH = 2
    READ_PAYLOAD_MASK = 3
    READ_PAYLOAD = 4


class WebSocketReader:
    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        self.queue = queue
        self._max_msg_size = max_msg_size

        self._exc = None  # type: Optional[BaseException]
        self._partial = bytearray()
        self._state = WSParserState.READ_HEADER

        self._opcode = None  # type: Optional[int]
        self._frame_fin = False
        self._frame_opcode = None  # type: Optional[int]
        self._frame_payload = bytearray()

        self._tail = b""
        self._has_mask = False
        self._frame_mask = None  # type: Optional[bytes]
        self._payload_length = 0
        self._payload_length_flag = 0
        self._compressed = None  # type: Optional[bool]
        self._decompressobj = None  # type: Any  # zlib.decompressobj actually
        self._compress = compress

    def feed_eof(self) -> None:
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        if self._exc:
            return True, data

        try:
            return self._feed_data(data)
        except Exception as exc:
            self._exc = exc
            self.queue.set_exception(exc)
            return True, b""

    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if compressed and not self._decompressobj:
                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # the previous frame was not finished,
                    # so we should get a continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompression must be done after all frames of the
                    # message have been received.
                    if compressed:
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket."""
        frames = []
        if self._tail:
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    #           / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    #     1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    #     1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    #     1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be larger than 125 bytes",
                        )

                    # Set compress status if the last frame was FIN,
                    # OR set compress status if this is the first fragment.
                    # Raise an error on a non-first fragment with rsv1 = 0x1.
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames
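    # Worked example (illustrative, not part of aiohttp): the smallest
    # server-to-client text frame carrying "hello" is 0x81 (FIN + TEXT
    # opcode) followed by 0x05 (no mask bit, 5-byte payload):
    #
    #   reader.parse_frame(b"\x81\x05hello")
    #   # -> [(True, 1, bytearray(b"hello"), False)]  # opcode 1 == WSMsgType.TEXT
    #
    # A masked client frame would set bit 0x80 in the second byte and
    # prepend a 4-byte mask before the payload.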


class WebSocketWriter:
    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: Any = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        self._output_size = 0
        self._compressobj = None  # type: Any  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled).
        # Do small packets need to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compression is per-frame
                compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(
                        level=zlib.Z_BEST_SPEED, wbits=-self.compress
                    )
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()
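    # Header sizing sketch (illustrative, not part of aiohttp): the payload
    # length field grows with the message, matching RFC 6455 section 5.2.
    #
    #   len(message) < 126      -> PACK_LEN1: 2-byte header, length inline
    #   len(message) < 1 << 16  -> PACK_LEN2: 126 flag + 2-byte length
    #   otherwise               -> PACK_LEN3: 127 flag + 8-byte length
    #
    # e.g. PACK_LEN1(0x80 | 0x1, 5) == b"\x81\x05" for a 5-byte text frame.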

    def _write(self, data: bytes) -> None:
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: bytes = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: bytes = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: bytes = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            self._closing = True

182
.venv/lib/python3.9/site-packages/aiohttp/http_writer.py
Normal file
@@ -0,0 +1,182 @@
"""Http related parsers and protocol."""

import asyncio
import collections
import zlib
from typing import Any, Awaitable, Callable, Optional, Union  # noqa

from multidict import CIMultiDict

from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS

__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")

HttpVersion = collections.namedtuple("HttpVersion", ["major", "minor"])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


class StreamWriter(AbstractStreamWriter):
    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
    ) -> None:
        self._protocol = protocol
        self._transport = protocol.transport

        self.loop = loop
        self.length = None
        self.chunked = False
        self.buffer_size = 0
        self.output_size = 0

        self._eof = False
        self._compress = None  # type: Any
        self._drain_waiter = None

        self._on_chunk_sent = on_chunk_sent  # type: _T_OnChunkSent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        return self._transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        self.chunked = True

    def enable_compression(self, encoding: str = "deflate") -> None:
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode)

    def _write(self, chunk: bytes) -> None:
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size

        if self._transport is None or self._transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self._transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Write a chunk of data to the stream.

        write_eof() indicates the end of the stream;
        the writer can't be used after write_eof() has been called.
        write() returns a drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()
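    # Wire-format sketch (illustrative, not part of aiohttp): with chunking
    # enabled, each write is framed as "<hex size>\r\n<data>\r\n", e.g.
    #
    #   writer.enable_chunking()
    #   await writer.write(b"hello")     # transport sees b"5\r\nhello\r\n"
    #   await writer.write_eof()         # transport sees b"0\r\n\r\n"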

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = self._compress.compress(chunk)

            chunk = chunk + self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True
        self._transport = None

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

            await w.write(data)
            await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()


def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    line = (
        status_line
        + "\r\n"
        + "".join([k + ": " + v + "\r\n" for k, v in headers.items()])
    )
    return line.encode("utf-8") + b"\r\n"


_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass

45
.venv/lib/python3.9/site-packages/aiohttp/locks.py
Normal file
@@ -0,0 +1,45 @@
import asyncio
import collections
from typing import Any, Optional

try:
    from typing import Deque
except ImportError:
    from typing_extensions import Deque


class EventResultOrError:
    """Wrap an asyncio.Event so that waiting tasks can be woken
    either normally or with an exception.

    thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc = None  # type: Optional[BaseException]
        self._event = asyncio.Event()
        self._waiters = collections.deque()  # type: Deque[asyncio.Future[Any]]

    def set(self, exc: Optional[BaseException] = None) -> None:
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        waiter = self._loop.create_task(self._event.wait())
        self._waiters.append(waiter)
        try:
            val = await waiter
        finally:
            self._waiters.remove(waiter)

        if self._exc is not None:
            raise self._exc

        return val

    def cancel(self) -> None:
        """Cancel all waiters."""
        for waiter in self._waiters:
            waiter.cancel()
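    # Usage sketch (illustrative, not part of aiohttp): one task waits while
    # another decides whether to release it normally or with an error.
    #
    #   ev = EventResultOrError(asyncio.get_event_loop())
    #
    #   async def waiter():
    #       await ev.wait()               # returns when set() is called
    #
    #   ev.set()                          # wake waiters normally, or:
    #   ev.set(exc=RuntimeError("boom"))  # make every wait() raise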