content
stringlengths
1
103k
path
stringlengths
8
216
filename
stringlengths
2
179
language
stringclasses
15 values
size_bytes
int64
2
189k
quality_score
float64
0.5
0.95
complexity
float64
0
1
documentation_ratio
float64
0
1
repository
stringclasses
5 values
stars
int64
0
1k
created_date
stringdate
2023-07-10 19:21:08
2025-07-09 19:11:45
license
stringclasses
4 values
is_test
bool
2 classes
file_hash
stringlengths
32
32
import asyncio\nimport collections.abc\nimport datetime\nimport enum\nimport json\nimport math\nimport time\nimport warnings\nfrom concurrent.futures import Executor\nfrom http import HTTPStatus\nfrom http.cookies import SimpleCookie\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Dict,\n Iterator,\n MutableMapping,\n Optional,\n Union,\n cast,\n)\n\nfrom multidict import CIMultiDict, istr\n\nfrom . import hdrs, payload\nfrom .abc import AbstractStreamWriter\nfrom .compression_utils import ZLibCompressor\nfrom .helpers import (\n ETAG_ANY,\n QUOTED_ETAG_RE,\n ETag,\n HeadersMixin,\n must_be_empty_body,\n parse_http_date,\n rfc822_formatted_time,\n sentinel,\n should_remove_content_length,\n validate_etag_value,\n)\nfrom .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11\nfrom .payload import Payload\nfrom .typedefs import JSONEncoder, LooseHeaders\n\nREASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}\nLARGE_BODY_SIZE = 1024**2\n\n__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")\n\n\nif TYPE_CHECKING:\n from .web_request import BaseRequest\n\n BaseClass = MutableMapping[str, Any]\nelse:\n BaseClass = collections.abc.MutableMapping\n\n\n# TODO(py311): Convert to StrEnum for wider use\nclass ContentCoding(enum.Enum):\n # The content codings that we have support for.\n #\n # Additional registered codings are listed at:\n # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding\n deflate = "deflate"\n gzip = "gzip"\n identity = "identity"\n\n\nCONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}\n\n############################################################\n# HTTP Response classes\n############################################################\n\n\nclass StreamResponse(BaseClass, HeadersMixin):\n\n _body: Union[None, bytes, bytearray, Payload]\n _length_check = True\n _body = None\n _keep_alive: Optional[bool] = None\n _chunked: bool = False\n 
_compression: bool = False\n _compression_strategy: Optional[int] = None\n _compression_force: Optional[ContentCoding] = None\n _req: Optional["BaseRequest"] = None\n _payload_writer: Optional[AbstractStreamWriter] = None\n _eof_sent: bool = False\n _must_be_empty_body: Optional[bool] = None\n _body_length = 0\n _cookies: Optional[SimpleCookie] = None\n _send_headers_immediately = True\n\n def __init__(\n self,\n *,\n status: int = 200,\n reason: Optional[str] = None,\n headers: Optional[LooseHeaders] = None,\n _real_headers: Optional[CIMultiDict[str]] = None,\n ) -> None:\n """Initialize a new stream response object.\n\n _real_headers is an internal parameter used to pass a pre-populated\n headers object. It is used by the `Response` class to avoid copying\n the headers when creating a new response object. It is not intended\n to be used by external code.\n """\n self._state: Dict[str, Any] = {}\n\n if _real_headers is not None:\n self._headers = _real_headers\n elif headers is not None:\n self._headers: CIMultiDict[str] = CIMultiDict(headers)\n else:\n self._headers = CIMultiDict()\n\n self._set_status(status, reason)\n\n @property\n def prepared(self) -> bool:\n return self._eof_sent or self._payload_writer is not None\n\n @property\n def task(self) -> "Optional[asyncio.Task[None]]":\n if self._req:\n return self._req.task\n else:\n return None\n\n @property\n def status(self) -> int:\n return self._status\n\n @property\n def chunked(self) -> bool:\n return self._chunked\n\n @property\n def compression(self) -> bool:\n return self._compression\n\n @property\n def reason(self) -> str:\n return self._reason\n\n def set_status(\n self,\n status: int,\n reason: Optional[str] = None,\n ) -> None:\n assert (\n not self.prepared\n ), "Cannot change the response status code after the headers have been sent"\n self._set_status(status, reason)\n\n def _set_status(self, status: int, reason: Optional[str]) -> None:\n self._status = int(status)\n if reason is None:\n reason 
= REASON_PHRASES.get(self._status, "")\n elif "\n" in reason:\n raise ValueError("Reason cannot contain \\n")\n self._reason = reason\n\n @property\n def keep_alive(self) -> Optional[bool]:\n return self._keep_alive\n\n def force_close(self) -> None:\n self._keep_alive = False\n\n @property\n def body_length(self) -> int:\n return self._body_length\n\n @property\n def output_length(self) -> int:\n warnings.warn("output_length is deprecated", DeprecationWarning)\n assert self._payload_writer\n return self._payload_writer.buffer_size\n\n def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:\n """Enables automatic chunked transfer encoding."""\n if hdrs.CONTENT_LENGTH in self._headers:\n raise RuntimeError(\n "You can't enable chunked encoding when a content length is set"\n )\n if chunk_size is not None:\n warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)\n self._chunked = True\n\n def enable_compression(\n self,\n force: Optional[Union[bool, ContentCoding]] = None,\n strategy: Optional[int] = None,\n ) -> None:\n """Enables response compression encoding."""\n # Backwards compatibility for when force was a bool <0.17.\n if isinstance(force, bool):\n force = ContentCoding.deflate if force else ContentCoding.identity\n warnings.warn(\n "Using boolean for force is deprecated #3318", DeprecationWarning\n )\n elif force is not None:\n assert isinstance(\n force, ContentCoding\n ), "force should one of None, bool or ContentEncoding"\n\n self._compression = True\n self._compression_force = force\n self._compression_strategy = strategy\n\n @property\n def headers(self) -> "CIMultiDict[str]":\n return self._headers\n\n @property\n def cookies(self) -> SimpleCookie:\n if self._cookies is None:\n self._cookies = SimpleCookie()\n return self._cookies\n\n def set_cookie(\n self,\n name: str,\n value: str,\n *,\n expires: Optional[str] = None,\n domain: Optional[str] = None,\n max_age: Optional[Union[int, str]] = None,\n path: str = "/",\n 
secure: Optional[bool] = None,\n httponly: Optional[bool] = None,\n version: Optional[str] = None,\n samesite: Optional[str] = None,\n partitioned: Optional[bool] = None,\n ) -> None:\n """Set or update response cookie.\n\n Sets new cookie or updates existent with new value.\n Also updates only those params which are not None.\n """\n if self._cookies is None:\n self._cookies = SimpleCookie()\n\n self._cookies[name] = value\n c = self._cookies[name]\n\n if expires is not None:\n c["expires"] = expires\n elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":\n del c["expires"]\n\n if domain is not None:\n c["domain"] = domain\n\n if max_age is not None:\n c["max-age"] = str(max_age)\n elif "max-age" in c:\n del c["max-age"]\n\n c["path"] = path\n\n if secure is not None:\n c["secure"] = secure\n if httponly is not None:\n c["httponly"] = httponly\n if version is not None:\n c["version"] = version\n if samesite is not None:\n c["samesite"] = samesite\n\n if partitioned is not None:\n c["partitioned"] = partitioned\n\n def del_cookie(\n self,\n name: str,\n *,\n domain: Optional[str] = None,\n path: str = "/",\n secure: Optional[bool] = None,\n httponly: Optional[bool] = None,\n samesite: Optional[str] = None,\n ) -> None:\n """Delete cookie.\n\n Creates new empty expired cookie.\n """\n # TODO: do we need domain/path here?\n if self._cookies is not None:\n self._cookies.pop(name, None)\n self.set_cookie(\n name,\n "",\n max_age=0,\n expires="Thu, 01 Jan 1970 00:00:00 GMT",\n domain=domain,\n path=path,\n secure=secure,\n httponly=httponly,\n samesite=samesite,\n )\n\n @property\n def content_length(self) -> Optional[int]:\n # Just a placeholder for adding setter\n return super().content_length\n\n @content_length.setter\n def content_length(self, value: Optional[int]) -> None:\n if value is not None:\n value = int(value)\n if self._chunked:\n raise RuntimeError(\n "You can't set content length when chunked encoding is enable"\n )\n 
self._headers[hdrs.CONTENT_LENGTH] = str(value)\n else:\n self._headers.pop(hdrs.CONTENT_LENGTH, None)\n\n @property\n def content_type(self) -> str:\n # Just a placeholder for adding setter\n return super().content_type\n\n @content_type.setter\n def content_type(self, value: str) -> None:\n self.content_type # read header values if needed\n self._content_type = str(value)\n self._generate_content_type_header()\n\n @property\n def charset(self) -> Optional[str]:\n # Just a placeholder for adding setter\n return super().charset\n\n @charset.setter\n def charset(self, value: Optional[str]) -> None:\n ctype = self.content_type # read header values if needed\n if ctype == "application/octet-stream":\n raise RuntimeError(\n "Setting charset for application/octet-stream "\n "doesn't make sense, setup content_type first"\n )\n assert self._content_dict is not None\n if value is None:\n self._content_dict.pop("charset", None)\n else:\n self._content_dict["charset"] = str(value).lower()\n self._generate_content_type_header()\n\n @property\n def last_modified(self) -> Optional[datetime.datetime]:\n """The value of Last-Modified HTTP header, or None.\n\n This header is represented as a `datetime` object.\n """\n return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))\n\n @last_modified.setter\n def last_modified(\n self, value: Optional[Union[int, float, datetime.datetime, str]]\n ) -> None:\n if value is None:\n self._headers.pop(hdrs.LAST_MODIFIED, None)\n elif isinstance(value, (int, float)):\n self._headers[hdrs.LAST_MODIFIED] = time.strftime(\n "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))\n )\n elif isinstance(value, datetime.datetime):\n self._headers[hdrs.LAST_MODIFIED] = time.strftime(\n "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()\n )\n elif isinstance(value, str):\n self._headers[hdrs.LAST_MODIFIED] = value\n else:\n msg = f"Unsupported type for last_modified: {type(value).__name__}"\n raise TypeError(msg)\n\n @property\n def etag(self) -> 
Optional[ETag]:\n quoted_value = self._headers.get(hdrs.ETAG)\n if not quoted_value:\n return None\n elif quoted_value == ETAG_ANY:\n return ETag(value=ETAG_ANY)\n match = QUOTED_ETAG_RE.fullmatch(quoted_value)\n if not match:\n return None\n is_weak, value = match.group(1, 2)\n return ETag(\n is_weak=bool(is_weak),\n value=value,\n )\n\n @etag.setter\n def etag(self, value: Optional[Union[ETag, str]]) -> None:\n if value is None:\n self._headers.pop(hdrs.ETAG, None)\n elif (isinstance(value, str) and value == ETAG_ANY) or (\n isinstance(value, ETag) and value.value == ETAG_ANY\n ):\n self._headers[hdrs.ETAG] = ETAG_ANY\n elif isinstance(value, str):\n validate_etag_value(value)\n self._headers[hdrs.ETAG] = f'"{value}"'\n elif isinstance(value, ETag) and isinstance(value.value, str):\n validate_etag_value(value.value)\n hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'\n self._headers[hdrs.ETAG] = hdr_value\n else:\n raise ValueError(\n f"Unsupported etag type: {type(value)}. 
"\n f"etag must be str, ETag or None"\n )\n\n def _generate_content_type_header(\n self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE\n ) -> None:\n assert self._content_dict is not None\n assert self._content_type is not None\n params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())\n if params:\n ctype = self._content_type + "; " + params\n else:\n ctype = self._content_type\n self._headers[CONTENT_TYPE] = ctype\n\n async def _do_start_compression(self, coding: ContentCoding) -> None:\n if coding is ContentCoding.identity:\n return\n assert self._payload_writer is not None\n self._headers[hdrs.CONTENT_ENCODING] = coding.value\n self._payload_writer.enable_compression(\n coding.value, self._compression_strategy\n )\n # Compressed payload may have different content length,\n # remove the header\n self._headers.popall(hdrs.CONTENT_LENGTH, None)\n\n async def _start_compression(self, request: "BaseRequest") -> None:\n if self._compression_force:\n await self._do_start_compression(self._compression_force)\n return\n # Encoding comparisons should be case-insensitive\n # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1\n accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()\n for value, coding in CONTENT_CODINGS.items():\n if value in accept_encoding:\n await self._do_start_compression(coding)\n return\n\n async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:\n if self._eof_sent:\n return None\n if self._payload_writer is not None:\n return self._payload_writer\n self._must_be_empty_body = must_be_empty_body(request.method, self.status)\n return await self._start(request)\n\n async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:\n self._req = request\n writer = self._payload_writer = request._payload_writer\n\n await self._prepare_headers()\n await request._prepare_hook(self)\n await self._write_headers()\n\n return writer\n\n async def _prepare_headers(self) -> None:\n request = self._req\n 
assert request is not None\n writer = self._payload_writer\n assert writer is not None\n keep_alive = self._keep_alive\n if keep_alive is None:\n keep_alive = request.keep_alive\n self._keep_alive = keep_alive\n\n version = request.version\n\n headers = self._headers\n if self._cookies:\n for cookie in self._cookies.values():\n value = cookie.output(header="")[1:]\n headers.add(hdrs.SET_COOKIE, value)\n\n if self._compression:\n await self._start_compression(request)\n\n if self._chunked:\n if version != HttpVersion11:\n raise RuntimeError(\n "Using chunked encoding is forbidden "\n "for HTTP/{0.major}.{0.minor}".format(request.version)\n )\n if not self._must_be_empty_body:\n writer.enable_chunking()\n headers[hdrs.TRANSFER_ENCODING] = "chunked"\n elif self._length_check: # Disabled for WebSockets\n writer.length = self.content_length\n if writer.length is None:\n if version >= HttpVersion11:\n if not self._must_be_empty_body:\n writer.enable_chunking()\n headers[hdrs.TRANSFER_ENCODING] = "chunked"\n elif not self._must_be_empty_body:\n keep_alive = False\n\n # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2\n # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4\n if self._must_be_empty_body:\n if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(\n request.method, self.status\n ):\n del headers[hdrs.CONTENT_LENGTH]\n # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10\n # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13\n if hdrs.TRANSFER_ENCODING in headers:\n del headers[hdrs.TRANSFER_ENCODING]\n elif (writer.length if self._length_check else self.content_length) != 0:\n # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5\n headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")\n headers.setdefault(hdrs.DATE, rfc822_formatted_time())\n headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)\n\n # connection header\n if hdrs.CONNECTION not in headers:\n if keep_alive:\n if version == 
HttpVersion10:\n headers[hdrs.CONNECTION] = "keep-alive"\n elif version == HttpVersion11:\n headers[hdrs.CONNECTION] = "close"\n\n async def _write_headers(self) -> None:\n request = self._req\n assert request is not None\n writer = self._payload_writer\n assert writer is not None\n # status line\n version = request.version\n status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"\n await writer.write_headers(status_line, self._headers)\n # Send headers immediately if not opted into buffering\n if self._send_headers_immediately:\n writer.send_headers()\n\n async def write(self, data: Union[bytes, bytearray, memoryview]) -> None:\n assert isinstance(\n data, (bytes, bytearray, memoryview)\n ), "data argument must be byte-ish (%r)" % type(data)\n\n if self._eof_sent:\n raise RuntimeError("Cannot call write() after write_eof()")\n if self._payload_writer is None:\n raise RuntimeError("Cannot call write() before prepare()")\n\n await self._payload_writer.write(data)\n\n async def drain(self) -> None:\n assert not self._eof_sent, "EOF has already been sent"\n assert self._payload_writer is not None, "Response has not been started"\n warnings.warn(\n "drain method is deprecated, use await resp.write()",\n DeprecationWarning,\n stacklevel=2,\n )\n await self._payload_writer.drain()\n\n async def write_eof(self, data: bytes = b"") -> None:\n assert isinstance(\n data, (bytes, bytearray, memoryview)\n ), "data argument must be byte-ish (%r)" % type(data)\n\n if self._eof_sent:\n return\n\n assert self._payload_writer is not None, "Response has not been started"\n\n await self._payload_writer.write_eof(data)\n self._eof_sent = True\n self._req = None\n self._body_length = self._payload_writer.output_size\n self._payload_writer = None\n\n def __repr__(self) -> str:\n if self._eof_sent:\n info = "eof"\n elif self.prepared:\n assert self._req is not None\n info = f"{self._req.method} {self._req.path} "\n else:\n info = "not prepared"\n return 
f"<{self.__class__.__name__} {self.reason} {info}>"\n\n def __getitem__(self, key: str) -> Any:\n return self._state[key]\n\n def __setitem__(self, key: str, value: Any) -> None:\n self._state[key] = value\n\n def __delitem__(self, key: str) -> None:\n del self._state[key]\n\n def __len__(self) -> int:\n return len(self._state)\n\n def __iter__(self) -> Iterator[str]:\n return iter(self._state)\n\n def __hash__(self) -> int:\n return hash(id(self))\n\n def __eq__(self, other: object) -> bool:\n return self is other\n\n def __bool__(self) -> bool:\n return True\n\n\nclass Response(StreamResponse):\n\n _compressed_body: Optional[bytes] = None\n _send_headers_immediately = False\n\n def __init__(\n self,\n *,\n body: Any = None,\n status: int = 200,\n reason: Optional[str] = None,\n text: Optional[str] = None,\n headers: Optional[LooseHeaders] = None,\n content_type: Optional[str] = None,\n charset: Optional[str] = None,\n zlib_executor_size: Optional[int] = None,\n zlib_executor: Optional[Executor] = None,\n ) -> None:\n if body is not None and text is not None:\n raise ValueError("body and text are not allowed together")\n\n if headers is None:\n real_headers: CIMultiDict[str] = CIMultiDict()\n else:\n real_headers = CIMultiDict(headers)\n\n if content_type is not None and "charset" in content_type:\n raise ValueError("charset must not be in content_type argument")\n\n if text is not None:\n if hdrs.CONTENT_TYPE in real_headers:\n if content_type or charset:\n raise ValueError(\n "passing both Content-Type header and "\n "content_type or charset params "\n "is forbidden"\n )\n else:\n # fast path for filling headers\n if not isinstance(text, str):\n raise TypeError("text argument must be str (%r)" % type(text))\n if content_type is None:\n content_type = "text/plain"\n if charset is None:\n charset = "utf-8"\n real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset\n body = text.encode(charset)\n text = None\n elif hdrs.CONTENT_TYPE in 
real_headers:\n if content_type is not None or charset is not None:\n raise ValueError(\n "passing both Content-Type header and "\n "content_type or charset params "\n "is forbidden"\n )\n elif content_type is not None:\n if charset is not None:\n content_type += "; charset=" + charset\n real_headers[hdrs.CONTENT_TYPE] = content_type\n\n super().__init__(status=status, reason=reason, _real_headers=real_headers)\n\n if text is not None:\n self.text = text\n else:\n self.body = body\n\n self._zlib_executor_size = zlib_executor_size\n self._zlib_executor = zlib_executor\n\n @property\n def body(self) -> Optional[Union[bytes, Payload]]:\n return self._body\n\n @body.setter\n def body(self, body: Any) -> None:\n if body is None:\n self._body = None\n elif isinstance(body, (bytes, bytearray)):\n self._body = body\n else:\n try:\n self._body = body = payload.PAYLOAD_REGISTRY.get(body)\n except payload.LookupError:\n raise ValueError("Unsupported body type %r" % type(body))\n\n headers = self._headers\n\n # set content-type\n if hdrs.CONTENT_TYPE not in headers:\n headers[hdrs.CONTENT_TYPE] = body.content_type\n\n # copy payload headers\n if body.headers:\n for key, value in body.headers.items():\n if key not in headers:\n headers[key] = value\n\n self._compressed_body = None\n\n @property\n def text(self) -> Optional[str]:\n if self._body is None:\n return None\n # Note: When _body is a Payload (e.g. 
FilePayload), this may do blocking I/O\n # This is generally safe as most common payloads (BytesPayload, StringPayload)\n # don't do blocking I/O, but be careful with file-based payloads\n return self._body.decode(self.charset or "utf-8")\n\n @text.setter\n def text(self, text: str) -> None:\n assert text is None or isinstance(\n text, str\n ), "text argument must be str (%r)" % type(text)\n\n if self.content_type == "application/octet-stream":\n self.content_type = "text/plain"\n if self.charset is None:\n self.charset = "utf-8"\n\n self._body = text.encode(self.charset)\n self._compressed_body = None\n\n @property\n def content_length(self) -> Optional[int]:\n if self._chunked:\n return None\n\n if hdrs.CONTENT_LENGTH in self._headers:\n return int(self._headers[hdrs.CONTENT_LENGTH])\n\n if self._compressed_body is not None:\n # Return length of the compressed body\n return len(self._compressed_body)\n elif isinstance(self._body, Payload):\n # A payload without content length, or a compressed payload\n return None\n elif self._body is not None:\n return len(self._body)\n else:\n return 0\n\n @content_length.setter\n def content_length(self, value: Optional[int]) -> None:\n raise RuntimeError("Content length is set automatically")\n\n async def write_eof(self, data: bytes = b"") -> None:\n if self._eof_sent:\n return\n if self._compressed_body is None:\n body: Optional[Union[bytes, Payload]] = self._body\n else:\n body = self._compressed_body\n assert not data, f"data arg is not supported, got {data!r}"\n assert self._req is not None\n assert self._payload_writer is not None\n if body is None or self._must_be_empty_body:\n await super().write_eof()\n elif isinstance(self._body, Payload):\n await self._body.write(self._payload_writer)\n await self._body.close()\n await super().write_eof()\n else:\n await super().write_eof(cast(bytes, body))\n\n async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:\n if hdrs.CONTENT_LENGTH in self._headers:\n if 
should_remove_content_length(request.method, self.status):\n del self._headers[hdrs.CONTENT_LENGTH]\n elif not self._chunked:\n if isinstance(self._body, Payload):\n if self._body.size is not None:\n self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size)\n else:\n body_len = len(self._body) if self._body else "0"\n # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7\n if body_len != "0" or (\n self.status != 304 and request.method not in hdrs.METH_HEAD_ALL\n ):\n self._headers[hdrs.CONTENT_LENGTH] = str(body_len)\n\n return await super()._start(request)\n\n async def _do_start_compression(self, coding: ContentCoding) -> None:\n if self._chunked or isinstance(self._body, Payload):\n return await super()._do_start_compression(coding)\n if coding is ContentCoding.identity:\n return\n # Instead of using _payload_writer.enable_compression,\n # compress the whole body\n compressor = ZLibCompressor(\n encoding=coding.value,\n max_sync_chunk_size=self._zlib_executor_size,\n executor=self._zlib_executor,\n )\n assert self._body is not None\n if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:\n warnings.warn(\n "Synchronous compression of large response bodies "\n f"({len(self._body)} bytes) might block the async event loop. 
"\n "Consider providing a custom value to zlib_executor_size/"\n "zlib_executor response properties or disabling compression on it."\n )\n self._compressed_body = (\n await compressor.compress(self._body) + compressor.flush()\n )\n self._headers[hdrs.CONTENT_ENCODING] = coding.value\n self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))\n\n\ndef json_response(\n data: Any = sentinel,\n *,\n text: Optional[str] = None,\n body: Optional[bytes] = None,\n status: int = 200,\n reason: Optional[str] = None,\n headers: Optional[LooseHeaders] = None,\n content_type: str = "application/json",\n dumps: JSONEncoder = json.dumps,\n) -> Response:\n if data is not sentinel:\n if text or body:\n raise ValueError("only one of data, text, or body should be specified")\n else:\n text = dumps(data)\n return Response(\n text=text,\n body=body,\n status=status,\n reason=reason,\n headers=headers,\n content_type=content_type,\n )\n
.venv\Lib\site-packages\aiohttp\web_response.py
web_response.py
Python
30,198
0.95
0.205607
0.055707
vue-tools
37
2024-07-02T01:10:15.989706
Apache-2.0
false
38d78f6082ab1c20cf3dc84d0cb467cb
import abc\nimport os # noqa\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Dict,\n Iterator,\n List,\n Optional,\n Sequence,\n Type,\n Union,\n overload,\n)\n\nimport attr\n\nfrom . import hdrs\nfrom .abc import AbstractView\nfrom .typedefs import Handler, PathLike\n\nif TYPE_CHECKING:\n from .web_request import Request\n from .web_response import StreamResponse\n from .web_urldispatcher import AbstractRoute, UrlDispatcher\nelse:\n Request = StreamResponse = UrlDispatcher = AbstractRoute = None\n\n\n__all__ = (\n "AbstractRouteDef",\n "RouteDef",\n "StaticDef",\n "RouteTableDef",\n "head",\n "options",\n "get",\n "post",\n "patch",\n "put",\n "delete",\n "route",\n "view",\n "static",\n)\n\n\nclass AbstractRouteDef(abc.ABC):\n @abc.abstractmethod\n def register(self, router: UrlDispatcher) -> List[AbstractRoute]:\n pass # pragma: no cover\n\n\n_HandlerType = Union[Type[AbstractView], Handler]\n\n\n@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)\nclass RouteDef(AbstractRouteDef):\n method: str\n path: str\n handler: _HandlerType\n kwargs: Dict[str, Any]\n\n def __repr__(self) -> str:\n info = []\n for name, value in sorted(self.kwargs.items()):\n info.append(f", {name}={value!r}")\n return "<RouteDef {method} {path} -> {handler.__name__!r}{info}>".format(\n method=self.method, path=self.path, handler=self.handler, info="".join(info)\n )\n\n def register(self, router: UrlDispatcher) -> List[AbstractRoute]:\n if self.method in hdrs.METH_ALL:\n reg = getattr(router, "add_" + self.method.lower())\n return [reg(self.path, self.handler, **self.kwargs)]\n else:\n return [\n router.add_route(self.method, self.path, self.handler, **self.kwargs)\n ]\n\n\n@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)\nclass StaticDef(AbstractRouteDef):\n prefix: str\n path: PathLike\n kwargs: Dict[str, Any]\n\n def __repr__(self) -> str:\n info = []\n for name, value in sorted(self.kwargs.items()):\n info.append(f", {name}={value!r}")\n return 
"<StaticDef {prefix} -> {path}{info}>".format(\n prefix=self.prefix, path=self.path, info="".join(info)\n )\n\n def register(self, router: UrlDispatcher) -> List[AbstractRoute]:\n resource = router.add_static(self.prefix, self.path, **self.kwargs)\n routes = resource.get_info().get("routes", {})\n return list(routes.values())\n\n\ndef route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return RouteDef(method, path, handler, kwargs)\n\n\ndef head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_HEAD, path, handler, **kwargs)\n\n\ndef options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_OPTIONS, path, handler, **kwargs)\n\n\ndef get(\n path: str,\n handler: _HandlerType,\n *,\n name: Optional[str] = None,\n allow_head: bool = True,\n **kwargs: Any,\n) -> RouteDef:\n return route(\n hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs\n )\n\n\ndef post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_POST, path, handler, **kwargs)\n\n\ndef put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_PUT, path, handler, **kwargs)\n\n\ndef patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_PATCH, path, handler, **kwargs)\n\n\ndef delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_DELETE, path, handler, **kwargs)\n\n\ndef view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:\n return route(hdrs.METH_ANY, path, handler, **kwargs)\n\n\ndef static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:\n return StaticDef(prefix, path, kwargs)\n\n\n_Deco = Callable[[_HandlerType], _HandlerType]\n\n\nclass RouteTableDef(Sequence[AbstractRouteDef]):\n """Route definition table"""\n\n def __init__(self) -> None:\n self._items: List[AbstractRouteDef] = []\n\n def __repr__(self) 
-> str:\n return f"<RouteTableDef count={len(self._items)}>"\n\n @overload\n def __getitem__(self, index: int) -> AbstractRouteDef: ...\n\n @overload\n def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...\n\n def __getitem__(self, index): # type: ignore[no-untyped-def]\n return self._items[index]\n\n def __iter__(self) -> Iterator[AbstractRouteDef]:\n return iter(self._items)\n\n def __len__(self) -> int:\n return len(self._items)\n\n def __contains__(self, item: object) -> bool:\n return item in self._items\n\n def route(self, method: str, path: str, **kwargs: Any) -> _Deco:\n def inner(handler: _HandlerType) -> _HandlerType:\n self._items.append(RouteDef(method, path, handler, kwargs))\n return handler\n\n return inner\n\n def head(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_HEAD, path, **kwargs)\n\n def get(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_GET, path, **kwargs)\n\n def post(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_POST, path, **kwargs)\n\n def put(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_PUT, path, **kwargs)\n\n def patch(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_PATCH, path, **kwargs)\n\n def delete(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_DELETE, path, **kwargs)\n\n def options(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_OPTIONS, path, **kwargs)\n\n def view(self, path: str, **kwargs: Any) -> _Deco:\n return self.route(hdrs.METH_ANY, path, **kwargs)\n\n def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:\n self._items.append(StaticDef(prefix, path, kwargs))\n
.venv\Lib\site-packages\aiohttp\web_routedef.py
web_routedef.py
Python
6,324
0.95
0.200935
0.012987
vue-tools
764
2024-08-03T18:07:56.688026
Apache-2.0
false
c51a671d07b4120765a2d2941672b382
import asyncio
import signal
import socket
import warnings
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, List, Optional, Set

from yarl import URL

from .typedefs import PathLike
from .web_app import Application
from .web_server import Server

if TYPE_CHECKING:
    from ssl import SSLContext
else:
    try:
        from ssl import SSLContext
    except ImportError:  # pragma: no cover
        # ssl may be absent in minimal Python builds; fall back to a dummy
        # so isinstance/annotation usage below still works.
        SSLContext = object  # type: ignore[misc,assignment]

__all__ = (
    "BaseSite",
    "TCPSite",
    "UnixSite",
    "NamedPipeSite",
    "SockSite",
    "BaseRunner",
    "AppRunner",
    "ServerRunner",
    "GracefulExit",
)


class GracefulExit(SystemExit):
    """SystemExit subclass raised by signal handlers to request a clean stop."""

    # Non-zero process exit code reported when the exception propagates.
    code = 1


def _raise_graceful_exit() -> None:
    """Signal-handler callback: abort the event loop via GracefulExit."""
    raise GracefulExit()


class BaseSite(ABC):
    """Abstract base for a single listening endpoint attached to a runner.

    Subclasses implement ``name`` and ``start`` for a specific transport
    (TCP socket, Unix socket, Windows named pipe, pre-made socket).
    """

    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        """Bind this site to *runner*.

        :param runner: a runner whose ``setup()`` has already been awaited.
        :param shutdown_timeout: deprecated here; set it on the runner instead.
            Passing a non-default value emits DeprecationWarning and mutates
            the runner's ``_shutdown_timeout`` for backward compatibility.
        :param ssl_context: optional TLS context; also switches the reported
            scheme to ``https`` in subclasses.
        :param backlog: listen(2) backlog passed to the event loop.
        :raises RuntimeError: if the runner has no server yet (setup not done).
        """
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        if shutdown_timeout != 60.0:
            msg = "shutdown_timeout should be set on BaseRunner"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            runner._shutdown_timeout = shutdown_timeout
        self._runner = runner
        self._ssl_context = ssl_context
        self._backlog = backlog
        # Filled in by the subclass's start(); stays None until then.
        self._server: Optional[asyncio.AbstractServer] = None

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable URL-ish identifier of the endpoint."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        """Register with the runner; subclasses then create the transport."""
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Close the listening server (if started) and unregister the site."""
        self._runner._check_site(self)
        if self._server is not None:  # Maybe not started yet
            self._server.close()

        self._runner._unreg_site(self)


class TCPSite(BaseSite):
    """Site serving on a TCP host/port via ``loop.create_server``."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        """Create a TCP site.

        :param host: interface to bind; None means "all interfaces"
            (reported as 0.0.0.0 in ``name``).
        :param port: explicit port; defaults to 8443 with TLS, 8080 without.
        :param reuse_address: forwarded to ``loop.create_server``.
        :param reuse_port: forwarded to ``loop.create_server``.
        """
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        if port is None:
            # Conventional defaults: 8443 for TLS, 8080 for plain HTTP.
            port = 8443 if self._ssl_context else 8080
        self._port = port
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        """Return e.g. ``http://0.0.0.0:8080`` built from scheme/host/port."""
        scheme = "https" if self._ssl_context else "http"
        host = "0.0.0.0" if not self._host else self._host
        return str(URL.build(scheme=scheme, host=host, port=self._port))

    async def start(self) -> None:
        """Register with the runner and start the TCP listening server."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )


class UnixSite(BaseSite):
    """Site serving on a Unix domain socket via ``loop.create_unix_server``."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: PathLike,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        """Create a Unix-socket site bound at filesystem *path*."""
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        """Return e.g. ``http://unix:/tmp/sock:`` (curl-style unix URL)."""
        scheme = "https" if self._ssl_context else "http"
        return f"{scheme}://unix:{self._path}:"

    async def start(self) -> None:
        """Register with the runner and start the Unix-socket server."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_unix_server(
            server,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )


class NamedPipeSite(BaseSite):
    """Windows-only site serving on a named pipe (requires proactor loop)."""

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        """Create a named-pipe site at *path*.

        :raises RuntimeError: when the current event loop is not a
            ProactorEventLoop (named pipes need IOCP on Windows).
        """
        loop = asyncio.get_event_loop()
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        # No ssl_context/backlog: those options do not apply to pipes.
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        """The raw pipe path is the site's name."""
        return self._path

    async def start(self) -> None:
        """Register with the runner and start serving on the pipe."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe is proactor-loop specific and returns a list;
        # the first element is the server object we keep for stop().
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]


class SockSite(BaseSite):
    """Site serving on a caller-provided, already-bound socket object."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        """Wrap *sock* as a site; the display name is derived eagerly here
        because getsockname() may be unavailable after the socket closes."""
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        scheme = "https" if self._ssl_context else "http"
        # AF_UNIX is absent on some platforms (e.g. Windows), hence hasattr.
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            # [:2] tolerates AF_INET6 4-tuples as well as AF_INET 2-tuples.
            host, port = sock.getsockname()[:2]
            name = str(URL.build(scheme=scheme, host=host, port=port))
        self._name = name

    @property
    def name(self) -> str:
        """Name precomputed in __init__ from the socket's own address."""
        return self._name

    async def start(self) -> None:
        """Register with the runner and serve on the wrapped socket."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )


class BaseRunner(ABC):
    """Abstract runner: owns the protocol-factory Server and its sites.

    Lifecycle: ``setup()`` -> create sites & ``site.start()`` -> ``cleanup()``.
    Subclasses supply ``_make_server``/``_cleanup_server``/``shutdown``.
    """

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout")

    def __init__(
        self,
        *,
        handle_signals: bool = False,
        shutdown_timeout: float = 60.0,
        **kwargs: Any,
    ) -> None:
        """Create a runner.

        :param handle_signals: install SIGINT/SIGTERM handlers in setup()
            that raise GracefulExit.
        :param shutdown_timeout: seconds granted to the server's shutdown
            during cleanup().
        :param kwargs: extra options forwarded by subclasses to the handler
            factory (see AppRunner._make_server).
        """
        self._handle_signals = handle_signals
        self._kwargs = kwargs
        # Set by setup(); None again after cleanup().
        self._server: Optional[Server] = None
        self._sites: List[BaseSite] = []
        self._shutdown_timeout = shutdown_timeout

    @property
    def server(self) -> Optional[Server]:
        """The underlying Server, or None before setup()/after cleanup()."""
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """getsockname() results of every listening socket of started sites."""
        ret: List[Any] = []
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets  # type: ignore[attr-defined]
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        """Snapshot copy of the registered sites."""
        return set(self._sites)

    async def setup(self) -> None:
        """Optionally install signal handlers, then build the server."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Call any shutdown hooks to help server close gracefully."""

    async def cleanup(self) -> None:
        """Stop all sites, drain the server, and undo setup()'s effects."""
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()

        if self._server:  # If setup succeeded
            # Yield to event loop to ensure incoming requests prior to stopping the sites
            # have all started to be handled before we proceed to close idle connections.
            await asyncio.sleep(0)
            self._server.pre_shutdown()
            await self.shutdown()
            await self._server.shutdown(self._shutdown_timeout)
            await self._cleanup_server()

        self._server = None
        if self._handle_signals:
            loop = asyncio.get_running_loop()
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        """Build and return the Server instance this runner manages."""
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        """Release resources created by _make_server()."""
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        # Internal: called from BaseSite.start(); duplicates are an error.
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        # Internal: assert the site belongs to this runner before stop().
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        # Internal: remove a previously registered site.
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)


class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        """Wrap an existing low-level Server instance."""
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        # Nothing to do: the bare Server has no application-level hooks.
        pass

    async def _make_server(self) -> Server:
        # The server was supplied ready-made by the caller.
        return self._web_server

    async def _cleanup_server(self) -> None:
        # Caller owns the server; nothing to release here.
        pass


class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        """Create a runner for *app*.

        :raises TypeError: if *app* is not a web.Application instance.
        """
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        """The wrapped Application."""
        return self._app

    async def shutdown(self) -> None:
        """Run the application's on_shutdown hooks."""
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        """Freeze the app, run startup hooks, and build its request handler."""
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        # on_startup must be frozen before startup() runs its callbacks.
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()

        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        """Run the application's cleanup hooks."""
        await self._app.cleanup()
.venv\Lib\site-packages\aiohttp\web_runner.py
web_runner.py
Python
12,185
0.95
0.185464
0.042424
react-lib
135
2024-02-17T11:24:18.563344
GPL-3.0
false
36b07f540ca26515fa7efab30e3276c1
import abc\nimport asyncio\nimport base64\nimport functools\nimport hashlib\nimport html\nimport inspect\nimport keyword\nimport os\nimport re\nimport sys\nimport warnings\nfrom functools import wraps\nfrom pathlib import Path\nfrom types import MappingProxyType\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Awaitable,\n Callable,\n Container,\n Dict,\n Final,\n Generator,\n Iterable,\n Iterator,\n List,\n Mapping,\n NoReturn,\n Optional,\n Pattern,\n Set,\n Sized,\n Tuple,\n Type,\n TypedDict,\n Union,\n cast,\n)\n\nfrom yarl import URL, __version__ as yarl_version\n\nfrom . import hdrs\nfrom .abc import AbstractMatchInfo, AbstractRouter, AbstractView\nfrom .helpers import DEBUG\nfrom .http import HttpVersion11\nfrom .typedefs import Handler, PathLike\nfrom .web_exceptions import (\n HTTPException,\n HTTPExpectationFailed,\n HTTPForbidden,\n HTTPMethodNotAllowed,\n HTTPNotFound,\n)\nfrom .web_fileresponse import FileResponse\nfrom .web_request import Request\nfrom .web_response import Response, StreamResponse\nfrom .web_routedef import AbstractRouteDef\n\n__all__ = (\n "UrlDispatcher",\n "UrlMappingMatchInfo",\n "AbstractResource",\n "Resource",\n "PlainResource",\n "DynamicResource",\n "AbstractRoute",\n "ResourceRoute",\n "StaticResource",\n "View",\n)\n\n\nif TYPE_CHECKING:\n from .web_app import Application\n\n BaseDict = Dict[str, str]\nelse:\n BaseDict = dict\n\nCIRCULAR_SYMLINK_ERROR = (\n (OSError,)\n if sys.version_info < (3, 10) and sys.platform.startswith("win32")\n else (RuntimeError,) if sys.version_info < (3, 13) else ()\n)\n\nYARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))\n\nHTTP_METHOD_RE: Final[Pattern[str]] = re.compile(\n r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"\n)\nROUTE_RE: Final[Pattern[str]] = re.compile(\n r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"\n)\nPATH_SEP: Final[str] = re.escape("/")\n\n\n_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]]\n_Resolve = 
Tuple[Optional["UrlMappingMatchInfo"], Set[str]]\n\nhtml_escape = functools.partial(html.escape, quote=True)\n\n\nclass _InfoDict(TypedDict, total=False):\n path: str\n\n formatter: str\n pattern: Pattern[str]\n\n directory: Path\n prefix: str\n routes: Mapping[str, "AbstractRoute"]\n\n app: "Application"\n\n domain: str\n\n rule: "AbstractRuleMatching"\n\n http_exception: HTTPException\n\n\nclass AbstractResource(Sized, Iterable["AbstractRoute"]):\n def __init__(self, *, name: Optional[str] = None) -> None:\n self._name = name\n\n @property\n def name(self) -> Optional[str]:\n return self._name\n\n @property\n @abc.abstractmethod\n def canonical(self) -> str:\n """Exposes the resource's canonical path.\n\n For example '/foo/bar/{name}'\n\n """\n\n @abc.abstractmethod # pragma: no branch\n def url_for(self, **kwargs: str) -> URL:\n """Construct url for resource with additional params."""\n\n @abc.abstractmethod # pragma: no branch\n async def resolve(self, request: Request) -> _Resolve:\n """Resolve resource.\n\n Return (UrlMappingMatchInfo, allowed_methods) pair.\n """\n\n @abc.abstractmethod\n def add_prefix(self, prefix: str) -> None:\n """Add a prefix to processed URLs.\n\n Required for subapplications support.\n """\n\n @abc.abstractmethod\n def get_info(self) -> _InfoDict:\n """Return a dict with additional info useful for introspection"""\n\n def freeze(self) -> None:\n pass\n\n @abc.abstractmethod\n def raw_match(self, path: str) -> bool:\n """Perform a raw match against path"""\n\n\nclass AbstractRoute(abc.ABC):\n def __init__(\n self,\n method: str,\n handler: Union[Handler, Type[AbstractView]],\n *,\n expect_handler: Optional[_ExpectHandler] = None,\n resource: Optional[AbstractResource] = None,\n ) -> None:\n\n if expect_handler is None:\n expect_handler = _default_expect_handler\n\n assert inspect.iscoroutinefunction(expect_handler) or (\n sys.version_info < (3, 14) and asyncio.iscoroutinefunction(expect_handler)\n ), f"Coroutine is expected, got 
{expect_handler!r}"\n\n method = method.upper()\n if not HTTP_METHOD_RE.match(method):\n raise ValueError(f"{method} is not allowed HTTP method")\n\n assert callable(handler), handler\n if inspect.iscoroutinefunction(handler) or (\n sys.version_info < (3, 14) and asyncio.iscoroutinefunction(handler)\n ):\n pass\n elif inspect.isgeneratorfunction(handler):\n warnings.warn(\n "Bare generators are deprecated, use @coroutine wrapper",\n DeprecationWarning,\n )\n elif isinstance(handler, type) and issubclass(handler, AbstractView):\n pass\n else:\n warnings.warn(\n "Bare functions are deprecated, use async ones", DeprecationWarning\n )\n\n @wraps(handler)\n async def handler_wrapper(request: Request) -> StreamResponse:\n result = old_handler(request) # type: ignore[call-arg]\n if asyncio.iscoroutine(result):\n result = await result\n assert isinstance(result, StreamResponse)\n return result\n\n old_handler = handler\n handler = handler_wrapper\n\n self._method = method\n self._handler = handler\n self._expect_handler = expect_handler\n self._resource = resource\n\n @property\n def method(self) -> str:\n return self._method\n\n @property\n def handler(self) -> Handler:\n return self._handler\n\n @property\n @abc.abstractmethod\n def name(self) -> Optional[str]:\n """Optional route's name, always equals to resource's name."""\n\n @property\n def resource(self) -> Optional[AbstractResource]:\n return self._resource\n\n @abc.abstractmethod\n def get_info(self) -> _InfoDict:\n """Return a dict with additional info useful for introspection"""\n\n @abc.abstractmethod # pragma: no branch\n def url_for(self, *args: str, **kwargs: str) -> URL:\n """Construct url for route with additional params."""\n\n async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]:\n return await self._expect_handler(request)\n\n\nclass UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):\n\n __slots__ = ("_route", "_apps", "_current_app", "_frozen")\n\n def __init__(self, 
match_dict: Dict[str, str], route: AbstractRoute) -> None:\n super().__init__(match_dict)\n self._route = route\n self._apps: List[Application] = []\n self._current_app: Optional[Application] = None\n self._frozen = False\n\n @property\n def handler(self) -> Handler:\n return self._route.handler\n\n @property\n def route(self) -> AbstractRoute:\n return self._route\n\n @property\n def expect_handler(self) -> _ExpectHandler:\n return self._route.handle_expect_header\n\n @property\n def http_exception(self) -> Optional[HTTPException]:\n return None\n\n def get_info(self) -> _InfoDict: # type: ignore[override]\n return self._route.get_info()\n\n @property\n def apps(self) -> Tuple["Application", ...]:\n return tuple(self._apps)\n\n def add_app(self, app: "Application") -> None:\n if self._frozen:\n raise RuntimeError("Cannot change apps stack after .freeze() call")\n if self._current_app is None:\n self._current_app = app\n self._apps.insert(0, app)\n\n @property\n def current_app(self) -> "Application":\n app = self._current_app\n assert app is not None\n return app\n\n @current_app.setter\n def current_app(self, app: "Application") -> None:\n if DEBUG: # pragma: no cover\n if app not in self._apps:\n raise RuntimeError(\n "Expected one of the following apps {!r}, got {!r}".format(\n self._apps, app\n )\n )\n self._current_app = app\n\n def freeze(self) -> None:\n self._frozen = True\n\n def __repr__(self) -> str:\n return f"<MatchInfo {super().__repr__()}: {self._route}>"\n\n\nclass MatchInfoError(UrlMappingMatchInfo):\n\n __slots__ = ("_exception",)\n\n def __init__(self, http_exception: HTTPException) -> None:\n self._exception = http_exception\n super().__init__({}, SystemRoute(self._exception))\n\n @property\n def http_exception(self) -> HTTPException:\n return self._exception\n\n def __repr__(self) -> str:\n return "<MatchInfoError {}: {}>".format(\n self._exception.status, self._exception.reason\n )\n\n\nasync def _default_expect_handler(request: Request) -> 
None:\n """Default handler for Expect header.\n\n Just send "100 Continue" to client.\n raise HTTPExpectationFailed if value of header is not "100-continue"\n """\n expect = request.headers.get(hdrs.EXPECT, "")\n if request.version == HttpVersion11:\n if expect.lower() == "100-continue":\n await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")\n # Reset output_size as we haven't started the main body yet.\n request.writer.output_size = 0\n else:\n raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)\n\n\nclass Resource(AbstractResource):\n def __init__(self, *, name: Optional[str] = None) -> None:\n super().__init__(name=name)\n self._routes: Dict[str, ResourceRoute] = {}\n self._any_route: Optional[ResourceRoute] = None\n self._allowed_methods: Set[str] = set()\n\n def add_route(\n self,\n method: str,\n handler: Union[Type[AbstractView], Handler],\n *,\n expect_handler: Optional[_ExpectHandler] = None,\n ) -> "ResourceRoute":\n if route := self._routes.get(method, self._any_route):\n raise RuntimeError(\n "Added route will never be executed, "\n f"method {route.method} is already "\n "registered"\n )\n\n route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)\n self.register_route(route_obj)\n return route_obj\n\n def register_route(self, route: "ResourceRoute") -> None:\n assert isinstance(\n route, ResourceRoute\n ), f"Instance of Route class is required, got {route!r}"\n if route.method == hdrs.METH_ANY:\n self._any_route = route\n self._allowed_methods.add(route.method)\n self._routes[route.method] = route\n\n async def resolve(self, request: Request) -> _Resolve:\n if (match_dict := self._match(request.rel_url.path_safe)) is None:\n return None, set()\n if route := self._routes.get(request.method, self._any_route):\n return UrlMappingMatchInfo(match_dict, route), self._allowed_methods\n return None, self._allowed_methods\n\n @abc.abstractmethod\n def _match(self, path: str) -> Optional[Dict[str, str]]:\n pass # 
pragma: no cover\n\n def __len__(self) -> int:\n return len(self._routes)\n\n def __iter__(self) -> Iterator["ResourceRoute"]:\n return iter(self._routes.values())\n\n # TODO: implement all abstract methods\n\n\nclass PlainResource(Resource):\n def __init__(self, path: str, *, name: Optional[str] = None) -> None:\n super().__init__(name=name)\n assert not path or path.startswith("/")\n self._path = path\n\n @property\n def canonical(self) -> str:\n return self._path\n\n def freeze(self) -> None:\n if not self._path:\n self._path = "/"\n\n def add_prefix(self, prefix: str) -> None:\n assert prefix.startswith("/")\n assert not prefix.endswith("/")\n assert len(prefix) > 1\n self._path = prefix + self._path\n\n def _match(self, path: str) -> Optional[Dict[str, str]]:\n # string comparison is about 10 times faster than regexp matching\n if self._path == path:\n return {}\n return None\n\n def raw_match(self, path: str) -> bool:\n return self._path == path\n\n def get_info(self) -> _InfoDict:\n return {"path": self._path}\n\n def url_for(self) -> URL: # type: ignore[override]\n return URL.build(path=self._path, encoded=True)\n\n def __repr__(self) -> str:\n name = "'" + self.name + "' " if self.name is not None else ""\n return f"<PlainResource {name} {self._path}>"\n\n\nclass DynamicResource(Resource):\n\n DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")\n DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")\n GOOD = r"[^{}/]+"\n\n def __init__(self, path: str, *, name: Optional[str] = None) -> None:\n super().__init__(name=name)\n self._orig_path = path\n pattern = ""\n formatter = ""\n for part in ROUTE_RE.split(path):\n match = self.DYN.fullmatch(part)\n if match:\n pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)\n formatter += "{" + match.group("var") + "}"\n continue\n\n match = self.DYN_WITH_RE.fullmatch(part)\n if match:\n pattern += "(?P<{var}>{re})".format(**match.groupdict())\n formatter += "{" + match.group("var") 
+ "}"\n continue\n\n if "{" in part or "}" in part:\n raise ValueError(f"Invalid path '{path}'['{part}']")\n\n part = _requote_path(part)\n formatter += part\n pattern += re.escape(part)\n\n try:\n compiled = re.compile(pattern)\n except re.error as exc:\n raise ValueError(f"Bad pattern '{pattern}': {exc}") from None\n assert compiled.pattern.startswith(PATH_SEP)\n assert formatter.startswith("/")\n self._pattern = compiled\n self._formatter = formatter\n\n @property\n def canonical(self) -> str:\n return self._formatter\n\n def add_prefix(self, prefix: str) -> None:\n assert prefix.startswith("/")\n assert not prefix.endswith("/")\n assert len(prefix) > 1\n self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)\n self._formatter = prefix + self._formatter\n\n def _match(self, path: str) -> Optional[Dict[str, str]]:\n match = self._pattern.fullmatch(path)\n if match is None:\n return None\n return {\n key: _unquote_path_safe(value) for key, value in match.groupdict().items()\n }\n\n def raw_match(self, path: str) -> bool:\n return self._orig_path == path\n\n def get_info(self) -> _InfoDict:\n return {"formatter": self._formatter, "pattern": self._pattern}\n\n def url_for(self, **parts: str) -> URL:\n url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})\n return URL.build(path=url, encoded=True)\n\n def __repr__(self) -> str:\n name = "'" + self.name + "' " if self.name is not None else ""\n return "<DynamicResource {name} {formatter}>".format(\n name=name, formatter=self._formatter\n )\n\n\nclass PrefixResource(AbstractResource):\n def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:\n assert not prefix or prefix.startswith("/"), prefix\n assert prefix in ("", "/") or not prefix.endswith("/"), prefix\n super().__init__(name=name)\n self._prefix = _requote_path(prefix)\n self._prefix2 = self._prefix + "/"\n\n @property\n def canonical(self) -> str:\n return self._prefix\n\n def add_prefix(self, prefix: 
str) -> None:\n assert prefix.startswith("/")\n assert not prefix.endswith("/")\n assert len(prefix) > 1\n self._prefix = prefix + self._prefix\n self._prefix2 = self._prefix + "/"\n\n def raw_match(self, prefix: str) -> bool:\n return False\n\n # TODO: impl missing abstract methods\n\n\nclass StaticResource(PrefixResource):\n VERSION_KEY = "v"\n\n def __init__(\n self,\n prefix: str,\n directory: PathLike,\n *,\n name: Optional[str] = None,\n expect_handler: Optional[_ExpectHandler] = None,\n chunk_size: int = 256 * 1024,\n show_index: bool = False,\n follow_symlinks: bool = False,\n append_version: bool = False,\n ) -> None:\n super().__init__(prefix, name=name)\n try:\n directory = Path(directory).expanduser().resolve(strict=True)\n except FileNotFoundError as error:\n raise ValueError(f"'{directory}' does not exist") from error\n if not directory.is_dir():\n raise ValueError(f"'{directory}' is not a directory")\n self._directory = directory\n self._show_index = show_index\n self._chunk_size = chunk_size\n self._follow_symlinks = follow_symlinks\n self._expect_handler = expect_handler\n self._append_version = append_version\n\n self._routes = {\n "GET": ResourceRoute(\n "GET", self._handle, self, expect_handler=expect_handler\n ),\n "HEAD": ResourceRoute(\n "HEAD", self._handle, self, expect_handler=expect_handler\n ),\n }\n self._allowed_methods = set(self._routes)\n\n def url_for( # type: ignore[override]\n self,\n *,\n filename: PathLike,\n append_version: Optional[bool] = None,\n ) -> URL:\n if append_version is None:\n append_version = self._append_version\n filename = str(filename).lstrip("/")\n\n url = URL.build(path=self._prefix, encoded=True)\n # filename is not encoded\n if YARL_VERSION < (1, 6):\n url = url / filename.replace("%", "%25")\n else:\n url = url / filename\n\n if append_version:\n unresolved_path = self._directory.joinpath(filename)\n try:\n if self._follow_symlinks:\n normalized_path = Path(os.path.normpath(unresolved_path))\n 
normalized_path.relative_to(self._directory)\n filepath = normalized_path.resolve()\n else:\n filepath = unresolved_path.resolve()\n filepath.relative_to(self._directory)\n except (ValueError, FileNotFoundError):\n # ValueError for case when path point to symlink\n # with follow_symlinks is False\n return url # relatively safe\n if filepath.is_file():\n # TODO cache file content\n # with file watcher for cache invalidation\n with filepath.open("rb") as f:\n file_bytes = f.read()\n h = self._get_file_hash(file_bytes)\n url = url.with_query({self.VERSION_KEY: h})\n return url\n return url\n\n @staticmethod\n def _get_file_hash(byte_array: bytes) -> str:\n m = hashlib.sha256() # todo sha256 can be configurable param\n m.update(byte_array)\n b64 = base64.urlsafe_b64encode(m.digest())\n return b64.decode("ascii")\n\n def get_info(self) -> _InfoDict:\n return {\n "directory": self._directory,\n "prefix": self._prefix,\n "routes": self._routes,\n }\n\n def set_options_route(self, handler: Handler) -> None:\n if "OPTIONS" in self._routes:\n raise RuntimeError("OPTIONS route was set already")\n self._routes["OPTIONS"] = ResourceRoute(\n "OPTIONS", handler, self, expect_handler=self._expect_handler\n )\n self._allowed_methods.add("OPTIONS")\n\n async def resolve(self, request: Request) -> _Resolve:\n path = request.rel_url.path_safe\n method = request.method\n if not path.startswith(self._prefix2) and path != self._prefix:\n return None, set()\n\n allowed_methods = self._allowed_methods\n if method not in allowed_methods:\n return None, allowed_methods\n\n match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])}\n return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)\n\n def __len__(self) -> int:\n return len(self._routes)\n\n def __iter__(self) -> Iterator[AbstractRoute]:\n return iter(self._routes.values())\n\n async def _handle(self, request: Request) -> StreamResponse:\n rel_url = request.match_info["filename"]\n filename = 
Path(rel_url)\n if filename.anchor:\n # rel_url is an absolute name like\n # /static/\\machine_name\c$ or /static/D:\path\n # where the static dir is totally different\n raise HTTPForbidden()\n\n unresolved_path = self._directory.joinpath(filename)\n loop = asyncio.get_running_loop()\n return await loop.run_in_executor(\n None, self._resolve_path_to_response, unresolved_path\n )\n\n def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse:\n """Take the unresolved path and query the file system to form a response."""\n # Check for access outside the root directory. For follow symlinks, URI\n # cannot traverse out, but symlinks can. Otherwise, no access outside\n # root is permitted.\n try:\n if self._follow_symlinks:\n normalized_path = Path(os.path.normpath(unresolved_path))\n normalized_path.relative_to(self._directory)\n file_path = normalized_path.resolve()\n else:\n file_path = unresolved_path.resolve()\n file_path.relative_to(self._directory)\n except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error:\n # ValueError is raised for the relative check. Circular symlinks\n # raise here on resolving for python < 3.13.\n raise HTTPNotFound() from error\n\n # if path is a directory, return the contents if permitted. 
Note the\n # directory check will raise if a segment is not readable.\n try:\n if file_path.is_dir():\n if self._show_index:\n return Response(\n text=self._directory_as_html(file_path),\n content_type="text/html",\n )\n else:\n raise HTTPForbidden()\n except PermissionError as error:\n raise HTTPForbidden() from error\n\n # Return the file response, which handles all other checks.\n return FileResponse(file_path, chunk_size=self._chunk_size)\n\n def _directory_as_html(self, dir_path: Path) -> str:\n """returns directory's index as html."""\n assert dir_path.is_dir()\n\n relative_path_to_dir = dir_path.relative_to(self._directory).as_posix()\n index_of = f"Index of /{html_escape(relative_path_to_dir)}"\n h1 = f"<h1>{index_of}</h1>"\n\n index_list = []\n dir_index = dir_path.iterdir()\n for _file in sorted(dir_index):\n # show file url as relative to static path\n rel_path = _file.relative_to(self._directory).as_posix()\n quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")\n\n # if file is a directory, add '/' to the end of the name\n if _file.is_dir():\n file_name = f"{_file.name}/"\n else:\n file_name = _file.name\n\n index_list.append(\n f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'\n )\n ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))\n body = f"<body>\n{h1}\n{ul}\n</body>"\n\n head_str = f"<head>\n<title>{index_of}</title>\n</head>"\n html = f"<html>\n{head_str}\n{body}\n</html>"\n\n return html\n\n def __repr__(self) -> str:\n name = "'" + self.name + "'" if self.name is not None else ""\n return "<StaticResource {name} {path} -> {directory!r}>".format(\n name=name, path=self._prefix, directory=self._directory\n )\n\n\nclass PrefixedSubAppResource(PrefixResource):\n def __init__(self, prefix: str, app: "Application") -> None:\n super().__init__(prefix)\n self._app = app\n self._add_prefix_to_resources(prefix)\n\n def add_prefix(self, prefix: str) -> None:\n super().add_prefix(prefix)\n 
self._add_prefix_to_resources(prefix)\n\n def _add_prefix_to_resources(self, prefix: str) -> None:\n router = self._app.router\n for resource in router.resources():\n # Since the canonical path of a resource is about\n # to change, we need to unindex it and then reindex\n router.unindex_resource(resource)\n resource.add_prefix(prefix)\n router.index_resource(resource)\n\n def url_for(self, *args: str, **kwargs: str) -> URL:\n raise RuntimeError(".url_for() is not supported by sub-application root")\n\n def get_info(self) -> _InfoDict:\n return {"app": self._app, "prefix": self._prefix}\n\n async def resolve(self, request: Request) -> _Resolve:\n match_info = await self._app.router.resolve(request)\n match_info.add_app(self._app)\n if isinstance(match_info.http_exception, HTTPMethodNotAllowed):\n methods = match_info.http_exception.allowed_methods\n else:\n methods = set()\n return match_info, methods\n\n def __len__(self) -> int:\n return len(self._app.router.routes())\n\n def __iter__(self) -> Iterator[AbstractRoute]:\n return iter(self._app.router.routes())\n\n def __repr__(self) -> str:\n return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(\n prefix=self._prefix, app=self._app\n )\n\n\nclass AbstractRuleMatching(abc.ABC):\n @abc.abstractmethod # pragma: no branch\n async def match(self, request: Request) -> bool:\n """Return bool if the request satisfies the criteria"""\n\n @abc.abstractmethod # pragma: no branch\n def get_info(self) -> _InfoDict:\n """Return a dict with additional info useful for introspection"""\n\n @property\n @abc.abstractmethod # pragma: no branch\n def canonical(self) -> str:\n """Return a str"""\n\n\nclass Domain(AbstractRuleMatching):\n re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")\n\n def __init__(self, domain: str) -> None:\n super().__init__()\n self._domain = self.validation(domain)\n\n @property\n def canonical(self) -> str:\n return self._domain\n\n def validation(self, domain: str) -> str:\n if not isinstance(domain, 
str):\n raise TypeError("Domain must be str")\n domain = domain.rstrip(".").lower()\n if not domain:\n raise ValueError("Domain cannot be empty")\n elif "://" in domain:\n raise ValueError("Scheme not supported")\n url = URL("http://" + domain)\n assert url.raw_host is not None\n if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):\n raise ValueError("Domain not valid")\n if url.port == 80:\n return url.raw_host\n return f"{url.raw_host}:{url.port}"\n\n async def match(self, request: Request) -> bool:\n host = request.headers.get(hdrs.HOST)\n if not host:\n return False\n return self.match_domain(host)\n\n def match_domain(self, host: str) -> bool:\n return host.lower() == self._domain\n\n def get_info(self) -> _InfoDict:\n return {"domain": self._domain}\n\n\nclass MaskDomain(Domain):\n re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")\n\n def __init__(self, domain: str) -> None:\n super().__init__(domain)\n mask = self._domain.replace(".", r"\.").replace("*", ".*")\n self._mask = re.compile(mask)\n\n @property\n def canonical(self) -> str:\n return self._mask.pattern\n\n def match_domain(self, host: str) -> bool:\n return self._mask.fullmatch(host) is not None\n\n\nclass MatchedSubAppResource(PrefixedSubAppResource):\n def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:\n AbstractResource.__init__(self)\n self._prefix = ""\n self._app = app\n self._rule = rule\n\n @property\n def canonical(self) -> str:\n return self._rule.canonical\n\n def get_info(self) -> _InfoDict:\n return {"app": self._app, "rule": self._rule}\n\n async def resolve(self, request: Request) -> _Resolve:\n if not await self._rule.match(request):\n return None, set()\n match_info = await self._app.router.resolve(request)\n match_info.add_app(self._app)\n if isinstance(match_info.http_exception, HTTPMethodNotAllowed):\n methods = match_info.http_exception.allowed_methods\n else:\n methods = set()\n return match_info, methods\n\n def __repr__(self) -> 
str:\n return f"<MatchedSubAppResource -> {self._app!r}>"\n\n\nclass ResourceRoute(AbstractRoute):\n """A route with resource"""\n\n def __init__(\n self,\n method: str,\n handler: Union[Handler, Type[AbstractView]],\n resource: AbstractResource,\n *,\n expect_handler: Optional[_ExpectHandler] = None,\n ) -> None:\n super().__init__(\n method, handler, expect_handler=expect_handler, resource=resource\n )\n\n def __repr__(self) -> str:\n return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(\n method=self.method, resource=self._resource, handler=self.handler\n )\n\n @property\n def name(self) -> Optional[str]:\n if self._resource is None:\n return None\n return self._resource.name\n\n def url_for(self, *args: str, **kwargs: str) -> URL:\n """Construct url for route with additional params."""\n assert self._resource is not None\n return self._resource.url_for(*args, **kwargs)\n\n def get_info(self) -> _InfoDict:\n assert self._resource is not None\n return self._resource.get_info()\n\n\nclass SystemRoute(AbstractRoute):\n def __init__(self, http_exception: HTTPException) -> None:\n super().__init__(hdrs.METH_ANY, self._handle)\n self._http_exception = http_exception\n\n def url_for(self, *args: str, **kwargs: str) -> URL:\n raise RuntimeError(".url_for() is not allowed for SystemRoute")\n\n @property\n def name(self) -> Optional[str]:\n return None\n\n def get_info(self) -> _InfoDict:\n return {"http_exception": self._http_exception}\n\n async def _handle(self, request: Request) -> StreamResponse:\n raise self._http_exception\n\n @property\n def status(self) -> int:\n return self._http_exception.status\n\n @property\n def reason(self) -> str:\n return self._http_exception.reason\n\n def __repr__(self) -> str:\n return "<SystemRoute {self.status}: {self.reason}>".format(self=self)\n\n\nclass View(AbstractView):\n async def _iter(self) -> StreamResponse:\n if self.request.method not in hdrs.METH_ALL:\n self._raise_allowed_methods()\n method: 
Optional[Callable[[], Awaitable[StreamResponse]]]\n method = getattr(self, self.request.method.lower(), None)\n if method is None:\n self._raise_allowed_methods()\n ret = await method()\n assert isinstance(ret, StreamResponse)\n return ret\n\n def __await__(self) -> Generator[Any, None, StreamResponse]:\n return self._iter().__await__()\n\n def _raise_allowed_methods(self) -> NoReturn:\n allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}\n raise HTTPMethodNotAllowed(self.request.method, allowed_methods)\n\n\nclass ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):\n def __init__(self, resources: List[AbstractResource]) -> None:\n self._resources = resources\n\n def __len__(self) -> int:\n return len(self._resources)\n\n def __iter__(self) -> Iterator[AbstractResource]:\n yield from self._resources\n\n def __contains__(self, resource: object) -> bool:\n return resource in self._resources\n\n\nclass RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):\n def __init__(self, resources: List[AbstractResource]):\n self._routes: List[AbstractRoute] = []\n for resource in resources:\n for route in resource:\n self._routes.append(route)\n\n def __len__(self) -> int:\n return len(self._routes)\n\n def __iter__(self) -> Iterator[AbstractRoute]:\n yield from self._routes\n\n def __contains__(self, route: object) -> bool:\n return route in self._routes\n\n\nclass UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):\n\n NAME_SPLIT_RE = re.compile(r"[.:-]")\n\n def __init__(self) -> None:\n super().__init__()\n self._resources: List[AbstractResource] = []\n self._named_resources: Dict[str, AbstractResource] = {}\n self._resource_index: dict[str, list[AbstractResource]] = {}\n self._matched_sub_app_resources: List[MatchedSubAppResource] = []\n\n async def resolve(self, request: Request) -> UrlMappingMatchInfo:\n resource_index = self._resource_index\n allowed_methods: Set[str] = set()\n\n # Walk the url 
parts looking for candidates. We walk the url backwards\n # to ensure the most explicit match is found first. If there are multiple\n # candidates for a given url part because there are multiple resources\n # registered for the same canonical path, we resolve them in a linear\n # fashion to ensure registration order is respected.\n url_part = request.rel_url.path_safe\n while url_part:\n for candidate in resource_index.get(url_part, ()):\n match_dict, allowed = await candidate.resolve(request)\n if match_dict is not None:\n return match_dict\n else:\n allowed_methods |= allowed\n if url_part == "/":\n break\n url_part = url_part.rpartition("/")[0] or "/"\n\n #\n # We didn't find any candidates, so we'll try the matched sub-app\n # resources which we have to walk in a linear fashion because they\n # have regex/wildcard match rules and we cannot index them.\n #\n # For most cases we do not expect there to be many of these since\n # currently they are only added by `add_domain`\n #\n for resource in self._matched_sub_app_resources:\n match_dict, allowed = await resource.resolve(request)\n if match_dict is not None:\n return match_dict\n else:\n allowed_methods |= allowed\n\n if allowed_methods:\n return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods))\n\n return MatchInfoError(HTTPNotFound())\n\n def __iter__(self) -> Iterator[str]:\n return iter(self._named_resources)\n\n def __len__(self) -> int:\n return len(self._named_resources)\n\n def __contains__(self, resource: object) -> bool:\n return resource in self._named_resources\n\n def __getitem__(self, name: str) -> AbstractResource:\n return self._named_resources[name]\n\n def resources(self) -> ResourcesView:\n return ResourcesView(self._resources)\n\n def routes(self) -> RoutesView:\n return RoutesView(self._resources)\n\n def named_resources(self) -> Mapping[str, AbstractResource]:\n return MappingProxyType(self._named_resources)\n\n def register_resource(self, resource: AbstractResource) -> 
None:\n assert isinstance(\n resource, AbstractResource\n ), f"Instance of AbstractResource class is required, got {resource!r}"\n if self.frozen:\n raise RuntimeError("Cannot register a resource into frozen router.")\n\n name = resource.name\n\n if name is not None:\n parts = self.NAME_SPLIT_RE.split(name)\n for part in parts:\n if keyword.iskeyword(part):\n raise ValueError(\n f"Incorrect route name {name!r}, "\n "python keywords cannot be used "\n "for route name"\n )\n if not part.isidentifier():\n raise ValueError(\n "Incorrect route name {!r}, "\n "the name should be a sequence of "\n "python identifiers separated "\n "by dash, dot or column".format(name)\n )\n if name in self._named_resources:\n raise ValueError(\n "Duplicate {!r}, "\n "already handled by {!r}".format(name, self._named_resources[name])\n )\n self._named_resources[name] = resource\n self._resources.append(resource)\n\n if isinstance(resource, MatchedSubAppResource):\n # We cannot index match sub-app resources because they have match rules\n self._matched_sub_app_resources.append(resource)\n else:\n self.index_resource(resource)\n\n def _get_resource_index_key(self, resource: AbstractResource) -> str:\n """Return a key to index the resource in the resource index."""\n if "{" in (index_key := resource.canonical):\n # strip at the first { to allow for variables, and than\n # rpartition at / to allow for variable parts in the path\n # For example if the canonical path is `/core/locations{tail:.*}`\n # the index key will be `/core` since index is based on the\n # url parts split by `/`\n index_key = index_key.partition("{")[0].rpartition("/")[0]\n return index_key.rstrip("/") or "/"\n\n def index_resource(self, resource: AbstractResource) -> None:\n """Add a resource to the resource index."""\n resource_key = self._get_resource_index_key(resource)\n # There may be multiple resources for a canonical path\n # so we keep them in a list to ensure that registration\n # order is respected.\n 
self._resource_index.setdefault(resource_key, []).append(resource)\n\n def unindex_resource(self, resource: AbstractResource) -> None:\n """Remove a resource from the resource index."""\n resource_key = self._get_resource_index_key(resource)\n self._resource_index[resource_key].remove(resource)\n\n def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:\n if path and not path.startswith("/"):\n raise ValueError("path should be started with / or be empty")\n # Reuse last added resource if path and name are the same\n if self._resources:\n resource = self._resources[-1]\n if resource.name == name and resource.raw_match(path):\n return cast(Resource, resource)\n if not ("{" in path or "}" in path or ROUTE_RE.search(path)):\n resource = PlainResource(path, name=name)\n self.register_resource(resource)\n return resource\n resource = DynamicResource(path, name=name)\n self.register_resource(resource)\n return resource\n\n def add_route(\n self,\n method: str,\n path: str,\n handler: Union[Handler, Type[AbstractView]],\n *,\n name: Optional[str] = None,\n expect_handler: Optional[_ExpectHandler] = None,\n ) -> AbstractRoute:\n resource = self.add_resource(path, name=name)\n return resource.add_route(method, handler, expect_handler=expect_handler)\n\n def add_static(\n self,\n prefix: str,\n path: PathLike,\n *,\n name: Optional[str] = None,\n expect_handler: Optional[_ExpectHandler] = None,\n chunk_size: int = 256 * 1024,\n show_index: bool = False,\n follow_symlinks: bool = False,\n append_version: bool = False,\n ) -> AbstractResource:\n """Add static files view.\n\n prefix - url prefix\n path - folder with files\n\n """\n assert prefix.startswith("/")\n if prefix.endswith("/"):\n prefix = prefix[:-1]\n resource = StaticResource(\n prefix,\n path,\n name=name,\n expect_handler=expect_handler,\n chunk_size=chunk_size,\n show_index=show_index,\n follow_symlinks=follow_symlinks,\n append_version=append_version,\n )\n self.register_resource(resource)\n 
return resource\n\n def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n """Shortcut for add_route with method HEAD."""\n return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)\n\n def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n """Shortcut for add_route with method OPTIONS."""\n return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)\n\n def add_get(\n self,\n path: str,\n handler: Handler,\n *,\n name: Optional[str] = None,\n allow_head: bool = True,\n **kwargs: Any,\n ) -> AbstractRoute:\n """Shortcut for add_route with method GET.\n\n If allow_head is true, another\n route is added allowing head requests to the same endpoint.\n """\n resource = self.add_resource(path, name=name)\n if allow_head:\n resource.add_route(hdrs.METH_HEAD, handler, **kwargs)\n return resource.add_route(hdrs.METH_GET, handler, **kwargs)\n\n def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n """Shortcut for add_route with method POST."""\n return self.add_route(hdrs.METH_POST, path, handler, **kwargs)\n\n def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n """Shortcut for add_route with method PUT."""\n return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)\n\n def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n """Shortcut for add_route with method PATCH."""\n return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)\n\n def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n """Shortcut for add_route with method DELETE."""\n return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)\n\n def add_view(\n self, path: str, handler: Type[AbstractView], **kwargs: Any\n ) -> AbstractRoute:\n """Shortcut for add_route with ANY methods for a class-based view."""\n return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)\n\n def freeze(self) -> None:\n 
super().freeze()\n for resource in self._resources:\n resource.freeze()\n\n def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:\n """Append routes to route table.\n\n Parameter should be a sequence of RouteDef objects.\n\n Returns a list of registered AbstractRoute instances.\n """\n registered_routes = []\n for route_def in routes:\n registered_routes.extend(route_def.register(self))\n return registered_routes\n\n\ndef _quote_path(value: str) -> str:\n if YARL_VERSION < (1, 6):\n value = value.replace("%", "%25")\n return URL.build(path=value, encoded=False).raw_path\n\n\ndef _unquote_path_safe(value: str) -> str:\n if "%" not in value:\n return value\n return value.replace("%2F", "/").replace("%25", "%")\n\n\ndef _requote_path(value: str) -> str:\n # Quote non-ascii characters and other characters which must be quoted,\n # but preserve existing %-sequences.\n result = _quote_path(value)\n if "%" in value:\n result = result.replace("%25", "%")\n return result\n
.venv\Lib\site-packages\aiohttp\web_urldispatcher.py
web_urldispatcher.py
Python
45,303
0.95
0.236378
0.055238
node-utils
810
2023-11-14T14:14:41.451533
Apache-2.0
false
25456f8563349f3972267f26964a86b4
import asyncio\nimport base64\nimport binascii\nimport hashlib\nimport json\nimport sys\nfrom typing import Any, Final, Iterable, Optional, Tuple, Union, cast\n\nimport attr\nfrom multidict import CIMultiDict\n\nfrom . import hdrs\nfrom ._websocket.reader import WebSocketDataQueue\nfrom ._websocket.writer import DEFAULT_LIMIT\nfrom .abc import AbstractStreamWriter\nfrom .client_exceptions import WSMessageTypeError\nfrom .helpers import calculate_timeout_when, set_exception, set_result\nfrom .http import (\n WS_CLOSED_MESSAGE,\n WS_CLOSING_MESSAGE,\n WS_KEY,\n WebSocketError,\n WebSocketReader,\n WebSocketWriter,\n WSCloseCode,\n WSMessage,\n WSMsgType as WSMsgType,\n ws_ext_gen,\n ws_ext_parse,\n)\nfrom .http_websocket import _INTERNAL_RECEIVE_TYPES\nfrom .log import ws_logger\nfrom .streams import EofStream\nfrom .typedefs import JSONDecoder, JSONEncoder\nfrom .web_exceptions import HTTPBadRequest, HTTPException\nfrom .web_request import BaseRequest\nfrom .web_response import StreamResponse\n\nif sys.version_info >= (3, 11):\n import asyncio as async_timeout\nelse:\n import async_timeout\n\n__all__ = (\n "WebSocketResponse",\n "WebSocketReady",\n "WSMsgType",\n)\n\nTHRESHOLD_CONNLOST_ACCESS: Final[int] = 5\n\n\n@attr.s(auto_attribs=True, frozen=True, slots=True)\nclass WebSocketReady:\n ok: bool\n protocol: Optional[str]\n\n def __bool__(self) -> bool:\n return self.ok\n\n\nclass WebSocketResponse(StreamResponse):\n\n _length_check: bool = False\n _ws_protocol: Optional[str] = None\n _writer: Optional[WebSocketWriter] = None\n _reader: Optional[WebSocketDataQueue] = None\n _closed: bool = False\n _closing: bool = False\n _conn_lost: int = 0\n _close_code: Optional[int] = None\n _loop: Optional[asyncio.AbstractEventLoop] = None\n _waiting: bool = False\n _close_wait: Optional[asyncio.Future[None]] = None\n _exception: Optional[BaseException] = None\n _heartbeat_when: float = 0.0\n _heartbeat_cb: Optional[asyncio.TimerHandle] = None\n _pong_response_cb: 
Optional[asyncio.TimerHandle] = None\n _ping_task: Optional[asyncio.Task[None]] = None\n\n def __init__(\n self,\n *,\n timeout: float = 10.0,\n receive_timeout: Optional[float] = None,\n autoclose: bool = True,\n autoping: bool = True,\n heartbeat: Optional[float] = None,\n protocols: Iterable[str] = (),\n compress: bool = True,\n max_msg_size: int = 4 * 1024 * 1024,\n writer_limit: int = DEFAULT_LIMIT,\n ) -> None:\n super().__init__(status=101)\n self._protocols = protocols\n self._timeout = timeout\n self._receive_timeout = receive_timeout\n self._autoclose = autoclose\n self._autoping = autoping\n self._heartbeat = heartbeat\n if heartbeat is not None:\n self._pong_heartbeat = heartbeat / 2.0\n self._compress: Union[bool, int] = compress\n self._max_msg_size = max_msg_size\n self._writer_limit = writer_limit\n\n def _cancel_heartbeat(self) -> None:\n self._cancel_pong_response_cb()\n if self._heartbeat_cb is not None:\n self._heartbeat_cb.cancel()\n self._heartbeat_cb = None\n if self._ping_task is not None:\n self._ping_task.cancel()\n self._ping_task = None\n\n def _cancel_pong_response_cb(self) -> None:\n if self._pong_response_cb is not None:\n self._pong_response_cb.cancel()\n self._pong_response_cb = None\n\n def _reset_heartbeat(self) -> None:\n if self._heartbeat is None:\n return\n self._cancel_pong_response_cb()\n req = self._req\n timeout_ceil_threshold = (\n req._protocol._timeout_ceil_threshold if req is not None else 5\n )\n loop = self._loop\n assert loop is not None\n now = loop.time()\n when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)\n self._heartbeat_when = when\n if self._heartbeat_cb is None:\n # We do not cancel the previous heartbeat_cb here because\n # it generates a significant amount of TimerHandle churn\n # which causes asyncio to rebuild the heap frequently.\n # Instead _send_heartbeat() will reschedule the next\n # heartbeat if it fires too early.\n self._heartbeat_cb = loop.call_at(when, 
self._send_heartbeat)\n\n def _send_heartbeat(self) -> None:\n self._heartbeat_cb = None\n loop = self._loop\n assert loop is not None and self._writer is not None\n now = loop.time()\n if now < self._heartbeat_when:\n # Heartbeat fired too early, reschedule\n self._heartbeat_cb = loop.call_at(\n self._heartbeat_when, self._send_heartbeat\n )\n return\n\n req = self._req\n timeout_ceil_threshold = (\n req._protocol._timeout_ceil_threshold if req is not None else 5\n )\n when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)\n self._cancel_pong_response_cb()\n self._pong_response_cb = loop.call_at(when, self._pong_not_received)\n\n coro = self._writer.send_frame(b"", WSMsgType.PING)\n if sys.version_info >= (3, 12):\n # Optimization for Python 3.12, try to send the ping\n # immediately to avoid having to schedule\n # the task on the event loop.\n ping_task = asyncio.Task(coro, loop=loop, eager_start=True)\n else:\n ping_task = loop.create_task(coro)\n\n if not ping_task.done():\n self._ping_task = ping_task\n ping_task.add_done_callback(self._ping_task_done)\n else:\n self._ping_task_done(ping_task)\n\n def _ping_task_done(self, task: "asyncio.Task[None]") -> None:\n """Callback for when the ping task completes."""\n if not task.cancelled() and (exc := task.exception()):\n self._handle_ping_pong_exception(exc)\n self._ping_task = None\n\n def _pong_not_received(self) -> None:\n if self._req is not None and self._req.transport is not None:\n self._handle_ping_pong_exception(\n asyncio.TimeoutError(\n f"No PONG received after {self._pong_heartbeat} seconds"\n )\n )\n\n def _handle_ping_pong_exception(self, exc: BaseException) -> None:\n """Handle exceptions raised during ping/pong processing."""\n if self._closed:\n return\n self._set_closed()\n self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)\n self._exception = exc\n if self._waiting and not self._closing and self._reader is not None:\n 
self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0)\n\n def _set_closed(self) -> None:\n """Set the connection to closed.\n\n Cancel any heartbeat timers and set the closed flag.\n """\n self._closed = True\n self._cancel_heartbeat()\n\n async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:\n # make pre-check to don't hide it by do_handshake() exceptions\n if self._payload_writer is not None:\n return self._payload_writer\n\n protocol, writer = self._pre_start(request)\n payload_writer = await super().prepare(request)\n assert payload_writer is not None\n self._post_start(request, protocol, writer)\n await payload_writer.drain()\n return payload_writer\n\n def _handshake(\n self, request: BaseRequest\n ) -> Tuple["CIMultiDict[str]", Optional[str], int, bool]:\n headers = request.headers\n if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():\n raise HTTPBadRequest(\n text=(\n "No WebSocket UPGRADE hdr: {}\n Can "\n '"Upgrade" only to "WebSocket".'\n ).format(headers.get(hdrs.UPGRADE))\n )\n\n if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():\n raise HTTPBadRequest(\n text="No CONNECTION upgrade hdr: {}".format(\n headers.get(hdrs.CONNECTION)\n )\n )\n\n # find common sub-protocol between client and server\n protocol: Optional[str] = None\n if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:\n req_protocols = [\n str(proto.strip())\n for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")\n ]\n\n for proto in req_protocols:\n if proto in self._protocols:\n protocol = proto\n break\n else:\n # No overlap found: Return no protocol as per spec\n ws_logger.warning(\n "%s: Client protocols %r don’t overlap server-known ones %r",\n request.remote,\n req_protocols,\n self._protocols,\n )\n\n # check supported version\n version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")\n if version not in ("13", "8", "7"):\n raise HTTPBadRequest(text=f"Unsupported version: {version}")\n\n # check client handshake for validity\n key = 
headers.get(hdrs.SEC_WEBSOCKET_KEY)\n try:\n if not key or len(base64.b64decode(key)) != 16:\n raise HTTPBadRequest(text=f"Handshake error: {key!r}")\n except binascii.Error:\n raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None\n\n accept_val = base64.b64encode(\n hashlib.sha1(key.encode() + WS_KEY).digest()\n ).decode()\n response_headers = CIMultiDict(\n {\n hdrs.UPGRADE: "websocket",\n hdrs.CONNECTION: "upgrade",\n hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,\n }\n )\n\n notakeover = False\n compress = 0\n if self._compress:\n extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)\n # Server side always get return with no exception.\n # If something happened, just drop compress extension\n compress, notakeover = ws_ext_parse(extensions, isserver=True)\n if compress:\n enabledext = ws_ext_gen(\n compress=compress, isserver=True, server_notakeover=notakeover\n )\n response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext\n\n if protocol:\n response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol\n return (\n response_headers,\n protocol,\n compress,\n notakeover,\n )\n\n def _pre_start(self, request: BaseRequest) -> Tuple[Optional[str], WebSocketWriter]:\n self._loop = request._loop\n\n headers, protocol, compress, notakeover = self._handshake(request)\n\n self.set_status(101)\n self.headers.update(headers)\n self.force_close()\n self._compress = compress\n transport = request._protocol.transport\n assert transport is not None\n writer = WebSocketWriter(\n request._protocol,\n transport,\n compress=compress,\n notakeover=notakeover,\n limit=self._writer_limit,\n )\n\n return protocol, writer\n\n def _post_start(\n self, request: BaseRequest, protocol: Optional[str], writer: WebSocketWriter\n ) -> None:\n self._ws_protocol = protocol\n self._writer = writer\n\n self._reset_heartbeat()\n\n loop = self._loop\n assert loop is not None\n self._reader = WebSocketDataQueue(request._protocol, 2**16, loop=loop)\n request.protocol.set_parser(\n 
WebSocketReader(\n self._reader, self._max_msg_size, compress=bool(self._compress)\n )\n )\n # disable HTTP keepalive for WebSocket\n request.protocol.keep_alive(False)\n\n def can_prepare(self, request: BaseRequest) -> WebSocketReady:\n if self._writer is not None:\n raise RuntimeError("Already started")\n try:\n _, protocol, _, _ = self._handshake(request)\n except HTTPException:\n return WebSocketReady(False, None)\n else:\n return WebSocketReady(True, protocol)\n\n @property\n def prepared(self) -> bool:\n return self._writer is not None\n\n @property\n def closed(self) -> bool:\n return self._closed\n\n @property\n def close_code(self) -> Optional[int]:\n return self._close_code\n\n @property\n def ws_protocol(self) -> Optional[str]:\n return self._ws_protocol\n\n @property\n def compress(self) -> Union[int, bool]:\n return self._compress\n\n def get_extra_info(self, name: str, default: Any = None) -> Any:\n """Get optional transport information.\n\n If no value associated with ``name`` is found, ``default`` is returned.\n """\n writer = self._writer\n if writer is None:\n return default\n transport = writer.transport\n if transport is None:\n return default\n return transport.get_extra_info(name, default)\n\n def exception(self) -> Optional[BaseException]:\n return self._exception\n\n async def ping(self, message: bytes = b"") -> None:\n if self._writer is None:\n raise RuntimeError("Call .prepare() first")\n await self._writer.send_frame(message, WSMsgType.PING)\n\n async def pong(self, message: bytes = b"") -> None:\n # unsolicited pong\n if self._writer is None:\n raise RuntimeError("Call .prepare() first")\n await self._writer.send_frame(message, WSMsgType.PONG)\n\n async def send_frame(\n self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None\n ) -> None:\n """Send a frame over the websocket."""\n if self._writer is None:\n raise RuntimeError("Call .prepare() first")\n await self._writer.send_frame(message, opcode, compress)\n\n async 
def send_str(self, data: str, compress: Optional[int] = None) -> None:\n if self._writer is None:\n raise RuntimeError("Call .prepare() first")\n if not isinstance(data, str):\n raise TypeError("data argument must be str (%r)" % type(data))\n await self._writer.send_frame(\n data.encode("utf-8"), WSMsgType.TEXT, compress=compress\n )\n\n async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:\n if self._writer is None:\n raise RuntimeError("Call .prepare() first")\n if not isinstance(data, (bytes, bytearray, memoryview)):\n raise TypeError("data argument must be byte-ish (%r)" % type(data))\n await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress)\n\n async def send_json(\n self,\n data: Any,\n compress: Optional[int] = None,\n *,\n dumps: JSONEncoder = json.dumps,\n ) -> None:\n await self.send_str(dumps(data), compress=compress)\n\n async def write_eof(self) -> None: # type: ignore[override]\n if self._eof_sent:\n return\n if self._payload_writer is None:\n raise RuntimeError("Response has not been started")\n\n await self.close()\n self._eof_sent = True\n\n async def close(\n self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True\n ) -> bool:\n """Close websocket connection."""\n if self._writer is None:\n raise RuntimeError("Call .prepare() first")\n\n if self._closed:\n return False\n self._set_closed()\n\n try:\n await self._writer.close(code, message)\n writer = self._payload_writer\n assert writer is not None\n if drain:\n await writer.drain()\n except (asyncio.CancelledError, asyncio.TimeoutError):\n self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)\n raise\n except Exception as exc:\n self._exception = exc\n self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)\n return True\n\n reader = self._reader\n assert reader is not None\n # we need to break `receive()` cycle before we can call\n # `reader.read()` as `close()` may be called from different task\n if self._waiting:\n 
assert self._loop is not None\n assert self._close_wait is None\n self._close_wait = self._loop.create_future()\n reader.feed_data(WS_CLOSING_MESSAGE, 0)\n await self._close_wait\n\n if self._closing:\n self._close_transport()\n return True\n\n try:\n async with async_timeout.timeout(self._timeout):\n while True:\n msg = await reader.read()\n if msg.type is WSMsgType.CLOSE:\n self._set_code_close_transport(msg.data)\n return True\n except asyncio.CancelledError:\n self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)\n raise\n except Exception as exc:\n self._exception = exc\n self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)\n return True\n\n def _set_closing(self, code: WSCloseCode) -> None:\n """Set the close code and mark the connection as closing."""\n self._closing = True\n self._close_code = code\n self._cancel_heartbeat()\n\n def _set_code_close_transport(self, code: WSCloseCode) -> None:\n """Set the close code and close the transport."""\n self._close_code = code\n self._close_transport()\n\n def _close_transport(self) -> None:\n """Close the transport."""\n if self._req is not None and self._req.transport is not None:\n self._req.transport.close()\n\n async def receive(self, timeout: Optional[float] = None) -> WSMessage:\n if self._reader is None:\n raise RuntimeError("Call .prepare() first")\n\n receive_timeout = timeout or self._receive_timeout\n while True:\n if self._waiting:\n raise RuntimeError("Concurrent call to receive() is not allowed")\n\n if self._closed:\n self._conn_lost += 1\n if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:\n raise RuntimeError("WebSocket connection is closed.")\n return WS_CLOSED_MESSAGE\n elif self._closing:\n return WS_CLOSING_MESSAGE\n\n try:\n self._waiting = True\n try:\n if receive_timeout:\n # Entering the context manager and creating\n # Timeout() object can take almost 50% of the\n # run time in this loop so we avoid it if\n # there is no read timeout.\n async with 
async_timeout.timeout(receive_timeout):\n msg = await self._reader.read()\n else:\n msg = await self._reader.read()\n self._reset_heartbeat()\n finally:\n self._waiting = False\n if self._close_wait:\n set_result(self._close_wait, None)\n except asyncio.TimeoutError:\n raise\n except EofStream:\n self._close_code = WSCloseCode.OK\n await self.close()\n return WSMessage(WSMsgType.CLOSED, None, None)\n except WebSocketError as exc:\n self._close_code = exc.code\n await self.close(code=exc.code)\n return WSMessage(WSMsgType.ERROR, exc, None)\n except Exception as exc:\n self._exception = exc\n self._set_closing(WSCloseCode.ABNORMAL_CLOSURE)\n await self.close()\n return WSMessage(WSMsgType.ERROR, exc, None)\n\n if msg.type not in _INTERNAL_RECEIVE_TYPES:\n # If its not a close/closing/ping/pong message\n # we can return it immediately\n return msg\n\n if msg.type is WSMsgType.CLOSE:\n self._set_closing(msg.data)\n # Could be closed while awaiting reader.\n if not self._closed and self._autoclose:\n # The client is likely going to close the\n # connection out from under us so we do not\n # want to drain any pending writes as it will\n # likely result writing to a broken pipe.\n await self.close(drain=False)\n elif msg.type is WSMsgType.CLOSING:\n self._set_closing(WSCloseCode.OK)\n elif msg.type is WSMsgType.PING and self._autoping:\n await self.pong(msg.data)\n continue\n elif msg.type is WSMsgType.PONG and self._autoping:\n continue\n\n return msg\n\n async def receive_str(self, *, timeout: Optional[float] = None) -> str:\n msg = await self.receive(timeout)\n if msg.type is not WSMsgType.TEXT:\n raise WSMessageTypeError(\n f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT"\n )\n return cast(str, msg.data)\n\n async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:\n msg = await self.receive(timeout)\n if msg.type is not WSMsgType.BINARY:\n raise WSMessageTypeError(\n f"Received message {msg.type}:{msg.data!r} is not 
WSMsgType.BINARY"\n )\n return cast(bytes, msg.data)\n\n async def receive_json(\n self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None\n ) -> Any:\n data = await self.receive_str(timeout=timeout)\n return loads(data)\n\n async def write(self, data: bytes) -> None:\n raise RuntimeError("Cannot call .write() for websocket")\n\n def __aiter__(self) -> "WebSocketResponse":\n return self\n\n async def __anext__(self) -> WSMessage:\n msg = await self.receive()\n if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):\n raise StopAsyncIteration\n return msg\n\n def _cancel(self, exc: BaseException) -> None:\n # web_protocol calls this from connection_lost\n # or when the server is shutting down.\n self._closing = True\n self._cancel_heartbeat()\n if self._reader is not None:\n set_exception(self._reader, exc)\n
.venv\Lib\site-packages\aiohttp\web_ws.py
web_ws.py
Python
23,370
0.95
0.190174
0.063752
python-kit
703
2025-01-02T17:08:15.481348
BSD-3-Clause
false
3f1fe0dd7ce8f5d5115c4161f4ba85d7
"""\nInternal cookie handling helpers.\n\nThis module contains internal utilities for cookie parsing and manipulation.\nThese are not part of the public API and may change without notice.\n"""\n\nimport re\nimport sys\nfrom http.cookies import Morsel\nfrom typing import List, Optional, Sequence, Tuple, cast\n\nfrom .log import internal_logger\n\n__all__ = (\n "parse_set_cookie_headers",\n "parse_cookie_header",\n "preserve_morsel_with_coded_value",\n)\n\n# Cookie parsing constants\n# Allow more characters in cookie names to handle real-world cookies\n# that don't strictly follow RFC standards (fixes #2683)\n# RFC 6265 defines cookie-name token as per RFC 2616 Section 2.2,\n# but many servers send cookies with characters like {} [] () etc.\n# This makes the cookie parser more tolerant of real-world cookies\n# while still providing some validation to catch obviously malformed names.\n_COOKIE_NAME_RE = re.compile(r"^[!#$%&\'()*+\-./0-9:<=>?@A-Z\[\]^_`a-z{|}~]+$")\n_COOKIE_KNOWN_ATTRS = frozenset( # AKA Morsel._reserved\n (\n "path",\n "domain",\n "max-age",\n "expires",\n "secure",\n "httponly",\n "samesite",\n "partitioned",\n "version",\n "comment",\n )\n)\n_COOKIE_BOOL_ATTRS = frozenset( # AKA Morsel._flags\n ("secure", "httponly", "partitioned")\n)\n\n# SimpleCookie's pattern for parsing cookies with relaxed validation\n# Based on http.cookies pattern but extended to allow more characters in cookie names\n# to handle real-world cookies (fixes #2683)\n_COOKIE_PATTERN = re.compile(\n r"""\n \s* # Optional whitespace at start of cookie\n (?P<key> # Start of group 'key'\n # aiohttp has extended to include [] for compatibility with real-world cookies\n [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]+? 
# Any word of at least one letter\n ) # End of group 'key'\n ( # Optional group: there may not be a value.\n \s*=\s* # Equal Sign\n (?P<val> # Start of group 'val'\n "(?:[^\\"]|\\.)*" # Any double-quoted string (properly closed)\n | # or\n "[^";]* # Unmatched opening quote (differs from SimpleCookie - issue #7993)\n | # or\n # Special case for "expires" attr - RFC 822, RFC 850, RFC 1036, RFC 1123\n (\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day (with comma)\n [\w\d\s-]{9,11}\s[\d:]{8}\s # Date and time in specific format\n (GMT|[+-]\d{4}) # Timezone: GMT or RFC 2822 offset like -0000, +0100\n # NOTE: RFC 2822 timezone support is an aiohttp extension\n # for issue #4493 - SimpleCookie does NOT support this\n | # or\n # ANSI C asctime() format: "Wed Jun 9 10:18:14 2021"\n # NOTE: This is an aiohttp extension for issue #4327 - SimpleCookie does NOT support this format\n \w{3}\s+\w{3}\s+[\s\d]\d\s+\d{2}:\d{2}:\d{2}\s+\d{4}\n | # or\n [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]* # Any word or empty string\n ) # End of group 'val'\n )? 
# End of optional value group\n \s* # Any number of spaces.\n (\s+|;|$) # Ending either at space, semicolon, or EOS.\n """,\n re.VERBOSE | re.ASCII,\n)\n\n\ndef preserve_morsel_with_coded_value(cookie: Morsel[str]) -> Morsel[str]:\n """\n Preserve a Morsel's coded_value exactly as received from the server.\n\n This function ensures that cookie encoding is preserved exactly as sent by\n the server, which is critical for compatibility with old servers that have\n strict requirements about cookie formats.\n\n This addresses the issue described in https://github.com/aio-libs/aiohttp/pull/1453\n where Python's SimpleCookie would re-encode cookies, breaking authentication\n with certain servers.\n\n Args:\n cookie: A Morsel object from SimpleCookie\n\n Returns:\n A Morsel object with preserved coded_value\n\n """\n mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))\n # We use __setstate__ instead of the public set() API because it allows us to\n # bypass validation and set already validated state. 
This is more stable than\n # setting protected attributes directly and unlikely to change since it would\n # break pickling.\n mrsl_val.__setstate__( # type: ignore[attr-defined]\n {"key": cookie.key, "value": cookie.value, "coded_value": cookie.coded_value}\n )\n return mrsl_val\n\n\n_unquote_sub = re.compile(r"\\(?:([0-3][0-7][0-7])|(.))").sub\n\n\ndef _unquote_replace(m: re.Match[str]) -> str:\n """\n Replace function for _unquote_sub regex substitution.\n\n Handles escaped characters in cookie values:\n - Octal sequences are converted to their character representation\n - Other escaped characters are unescaped by removing the backslash\n """\n if m[1]:\n return chr(int(m[1], 8))\n return m[2]\n\n\ndef _unquote(value: str) -> str:\n """\n Unquote a cookie value.\n\n Vendored from http.cookies._unquote to ensure compatibility.\n\n Note: The original implementation checked for None, but we've removed\n that check since all callers already ensure the value is not None.\n """\n # If there aren't any doublequotes,\n # then there can't be any special characters. See RFC 2109.\n if len(value) < 2:\n return value\n if value[0] != '"' or value[-1] != '"':\n return value\n\n # We have to assume that we must decode this string.\n # Down to work.\n\n # Remove the "s\n value = value[1:-1]\n\n # Check for special sequences. 
Examples:\n # \012 --> \n\n # \" --> "\n #\n return _unquote_sub(_unquote_replace, value)\n\n\ndef parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]:\n """\n Parse a Cookie header according to RFC 6265 Section 5.4.\n\n Cookie headers contain only name-value pairs separated by semicolons.\n There are no attributes in Cookie headers - even names that match\n attribute names (like 'path' or 'secure') should be treated as cookies.\n\n This parser uses the same regex-based approach as parse_set_cookie_headers\n to properly handle quoted values that may contain semicolons.\n\n Args:\n header: The Cookie header value to parse\n\n Returns:\n List of (name, Morsel) tuples for compatibility with SimpleCookie.update()\n """\n if not header:\n return []\n\n cookies: List[Tuple[str, Morsel[str]]] = []\n i = 0\n n = len(header)\n\n while i < n:\n # Use the same pattern as parse_set_cookie_headers to find cookies\n match = _COOKIE_PATTERN.match(header, i)\n if not match:\n break\n\n key = match.group("key")\n value = match.group("val") or ""\n i = match.end(0)\n\n # Validate the name\n if not key or not _COOKIE_NAME_RE.match(key):\n internal_logger.warning("Can not load cookie: Illegal cookie name %r", key)\n continue\n\n # Create new morsel\n morsel: Morsel[str] = Morsel()\n # Preserve the original value as coded_value (with quotes if present)\n # We use __setstate__ instead of the public set() API because it allows us to\n # bypass validation and set already validated state. 
This is more stable than\n # setting protected attributes directly and unlikely to change since it would\n # break pickling.\n morsel.__setstate__( # type: ignore[attr-defined]\n {"key": key, "value": _unquote(value), "coded_value": value}\n )\n\n cookies.append((key, morsel))\n\n return cookies\n\n\ndef parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]:\n """\n Parse cookie headers using a vendored version of SimpleCookie parsing.\n\n This implementation is based on SimpleCookie.__parse_string to ensure\n compatibility with how SimpleCookie parses cookies, including handling\n of malformed cookies with missing semicolons.\n\n This function is used for both Cookie and Set-Cookie headers in order to be\n forgiving. Ideally we would have followed RFC 6265 Section 5.2 (for Cookie\n headers) and RFC 6265 Section 4.2.1 (for Set-Cookie headers), but the\n real world data makes it impossible since we need to be a bit more forgiving.\n\n NOTE: This implementation differs from SimpleCookie in handling unmatched quotes.\n SimpleCookie will stop parsing when it encounters a cookie value with an unmatched\n quote (e.g., 'cookie="value'), causing subsequent cookies to be silently dropped.\n This implementation handles unmatched quotes more gracefully to prevent cookie loss.\n See https://github.com/aio-libs/aiohttp/issues/7993\n """\n parsed_cookies: List[Tuple[str, Morsel[str]]] = []\n\n for header in headers:\n if not header:\n continue\n\n # Parse cookie string using SimpleCookie's algorithm\n i = 0\n n = len(header)\n current_morsel: Optional[Morsel[str]] = None\n morsel_seen = False\n\n while 0 <= i < n:\n # Start looking for a cookie\n match = _COOKIE_PATTERN.match(header, i)\n if not match:\n # No more cookies\n break\n\n key, value = match.group("key"), match.group("val")\n i = match.end(0)\n lower_key = key.lower()\n\n if key[0] == "$":\n if not morsel_seen:\n # We ignore attributes which pertain to the cookie\n # mechanism as a whole, 
such as "$Version".\n continue\n # Process as attribute\n if current_morsel is not None:\n attr_lower_key = lower_key[1:]\n if attr_lower_key in _COOKIE_KNOWN_ATTRS:\n current_morsel[attr_lower_key] = value or ""\n elif lower_key in _COOKIE_KNOWN_ATTRS:\n if not morsel_seen:\n # Invalid cookie string - attribute before cookie\n break\n if lower_key in _COOKIE_BOOL_ATTRS:\n # Boolean attribute with any value should be True\n if current_morsel is not None:\n if lower_key == "partitioned" and sys.version_info < (3, 14):\n dict.__setitem__(current_morsel, lower_key, True)\n else:\n current_morsel[lower_key] = True\n elif value is None:\n # Invalid cookie string - non-boolean attribute without value\n break\n elif current_morsel is not None:\n # Regular attribute with value\n current_morsel[lower_key] = _unquote(value)\n elif value is not None:\n # This is a cookie name=value pair\n # Validate the name\n if key in _COOKIE_KNOWN_ATTRS or not _COOKIE_NAME_RE.match(key):\n internal_logger.warning(\n "Can not load cookies: Illegal cookie name %r", key\n )\n current_morsel = None\n else:\n # Create new morsel\n current_morsel = Morsel()\n # Preserve the original value as coded_value (with quotes if present)\n # We use __setstate__ instead of the public set() API because it allows us to\n # bypass validation and set already validated state. This is more stable than\n # setting protected attributes directly and unlikely to change since it would\n # break pickling.\n current_morsel.__setstate__( # type: ignore[attr-defined]\n {"key": key, "value": _unquote(value), "coded_value": value}\n )\n parsed_cookies.append((key, current_morsel))\n morsel_seen = True\n else:\n # Invalid cookie string - no value for non-attribute\n break\n\n return parsed_cookies\n
.venv\Lib\site-packages\aiohttp\_cookie_helpers.py
_cookie_helpers.py
Python
12,727
0.95
0.15534
0.215385
vue-tools
611
2024-12-27T00:45:51.733184
BSD-3-Clause
false
f513cd871b4ab241cb64592c14b98b49
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t\n\n\ncdef extern from "llhttp.h":\n\n struct llhttp__internal_s:\n int32_t _index\n void* _span_pos0\n void* _span_cb0\n int32_t error\n const char* reason\n const char* error_pos\n void* data\n void* _current\n uint64_t content_length\n uint8_t type\n uint8_t method\n uint8_t http_major\n uint8_t http_minor\n uint8_t header_state\n uint8_t lenient_flags\n uint8_t upgrade\n uint8_t finish\n uint16_t flags\n uint16_t status_code\n void* settings\n\n ctypedef llhttp__internal_s llhttp__internal_t\n ctypedef llhttp__internal_t llhttp_t\n\n ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1\n ctypedef int (*llhttp_cb)(llhttp_t*) except -1\n\n struct llhttp_settings_s:\n llhttp_cb on_message_begin\n llhttp_data_cb on_url\n llhttp_data_cb on_status\n llhttp_data_cb on_header_field\n llhttp_data_cb on_header_value\n llhttp_cb on_headers_complete\n llhttp_data_cb on_body\n llhttp_cb on_message_complete\n llhttp_cb on_chunk_header\n llhttp_cb on_chunk_complete\n\n llhttp_cb on_url_complete\n llhttp_cb on_status_complete\n llhttp_cb on_header_field_complete\n llhttp_cb on_header_value_complete\n\n ctypedef llhttp_settings_s llhttp_settings_t\n\n enum llhttp_errno:\n HPE_OK,\n HPE_INTERNAL,\n HPE_STRICT,\n HPE_LF_EXPECTED,\n HPE_UNEXPECTED_CONTENT_LENGTH,\n HPE_CLOSED_CONNECTION,\n HPE_INVALID_METHOD,\n HPE_INVALID_URL,\n HPE_INVALID_CONSTANT,\n HPE_INVALID_VERSION,\n HPE_INVALID_HEADER_TOKEN,\n HPE_INVALID_CONTENT_LENGTH,\n HPE_INVALID_CHUNK_SIZE,\n HPE_INVALID_STATUS,\n HPE_INVALID_EOF_STATE,\n HPE_INVALID_TRANSFER_ENCODING,\n HPE_CB_MESSAGE_BEGIN,\n HPE_CB_HEADERS_COMPLETE,\n HPE_CB_MESSAGE_COMPLETE,\n HPE_CB_CHUNK_HEADER,\n HPE_CB_CHUNK_COMPLETE,\n HPE_PAUSED,\n HPE_PAUSED_UPGRADE,\n HPE_USER\n\n ctypedef llhttp_errno llhttp_errno_t\n\n enum llhttp_flags:\n F_CHUNKED,\n F_CONTENT_LENGTH\n\n enum llhttp_type:\n HTTP_REQUEST,\n HTTP_RESPONSE,\n HTTP_BOTH\n\n enum llhttp_method:\n 
HTTP_DELETE,\n HTTP_GET,\n HTTP_HEAD,\n HTTP_POST,\n HTTP_PUT,\n HTTP_CONNECT,\n HTTP_OPTIONS,\n HTTP_TRACE,\n HTTP_COPY,\n HTTP_LOCK,\n HTTP_MKCOL,\n HTTP_MOVE,\n HTTP_PROPFIND,\n HTTP_PROPPATCH,\n HTTP_SEARCH,\n HTTP_UNLOCK,\n HTTP_BIND,\n HTTP_REBIND,\n HTTP_UNBIND,\n HTTP_ACL,\n HTTP_REPORT,\n HTTP_MKACTIVITY,\n HTTP_CHECKOUT,\n HTTP_MERGE,\n HTTP_MSEARCH,\n HTTP_NOTIFY,\n HTTP_SUBSCRIBE,\n HTTP_UNSUBSCRIBE,\n HTTP_PATCH,\n HTTP_PURGE,\n HTTP_MKCALENDAR,\n HTTP_LINK,\n HTTP_UNLINK,\n HTTP_SOURCE,\n HTTP_PRI,\n HTTP_DESCRIBE,\n HTTP_ANNOUNCE,\n HTTP_SETUP,\n HTTP_PLAY,\n HTTP_PAUSE,\n HTTP_TEARDOWN,\n HTTP_GET_PARAMETER,\n HTTP_SET_PARAMETER,\n HTTP_REDIRECT,\n HTTP_RECORD,\n HTTP_FLUSH\n\n ctypedef llhttp_method llhttp_method_t;\n\n void llhttp_settings_init(llhttp_settings_t* settings)\n void llhttp_init(llhttp_t* parser, llhttp_type type,\n const llhttp_settings_t* settings)\n\n llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)\n\n int llhttp_should_keep_alive(const llhttp_t* parser)\n\n void llhttp_resume_after_upgrade(llhttp_t* parser)\n\n llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)\n const char* llhttp_get_error_reason(const llhttp_t* parser)\n const char* llhttp_get_error_pos(const llhttp_t* parser)\n\n const char* llhttp_method_name(llhttp_method_t method)\n\n void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)\n void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)\n void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)\n
.venv\Lib\site-packages\aiohttp\_cparser.pxd
_cparser.pxd
Other
4,453
0.85
0
0
node-utils
727
2024-10-18T19:43:44.907201
BSD-3-Clause
false
cffdd91f6d3b032b3e2b897f8feca72d
cdef extern from "_find_header.h":\n int find_header(char *, int)\n
.venv\Lib\site-packages\aiohttp\_find_header.pxd
_find_header.pxd
Other
70
0.65
0
0
node-utils
199
2024-03-28T13:16:17.867735
BSD-3-Clause
false
ebbadf191554c2527d18b586e2dd82fa
# The file is autogenerated from aiohttp/hdrs.py\n# Run ./tools/gen.py to update it after the origin changing.\n\nfrom . import hdrs\ncdef tuple headers = (\n hdrs.ACCEPT,\n hdrs.ACCEPT_CHARSET,\n hdrs.ACCEPT_ENCODING,\n hdrs.ACCEPT_LANGUAGE,\n hdrs.ACCEPT_RANGES,\n hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,\n hdrs.ACCESS_CONTROL_ALLOW_HEADERS,\n hdrs.ACCESS_CONTROL_ALLOW_METHODS,\n hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,\n hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,\n hdrs.ACCESS_CONTROL_MAX_AGE,\n hdrs.ACCESS_CONTROL_REQUEST_HEADERS,\n hdrs.ACCESS_CONTROL_REQUEST_METHOD,\n hdrs.AGE,\n hdrs.ALLOW,\n hdrs.AUTHORIZATION,\n hdrs.CACHE_CONTROL,\n hdrs.CONNECTION,\n hdrs.CONTENT_DISPOSITION,\n hdrs.CONTENT_ENCODING,\n hdrs.CONTENT_LANGUAGE,\n hdrs.CONTENT_LENGTH,\n hdrs.CONTENT_LOCATION,\n hdrs.CONTENT_MD5,\n hdrs.CONTENT_RANGE,\n hdrs.CONTENT_TRANSFER_ENCODING,\n hdrs.CONTENT_TYPE,\n hdrs.COOKIE,\n hdrs.DATE,\n hdrs.DESTINATION,\n hdrs.DIGEST,\n hdrs.ETAG,\n hdrs.EXPECT,\n hdrs.EXPIRES,\n hdrs.FORWARDED,\n hdrs.FROM,\n hdrs.HOST,\n hdrs.IF_MATCH,\n hdrs.IF_MODIFIED_SINCE,\n hdrs.IF_NONE_MATCH,\n hdrs.IF_RANGE,\n hdrs.IF_UNMODIFIED_SINCE,\n hdrs.KEEP_ALIVE,\n hdrs.LAST_EVENT_ID,\n hdrs.LAST_MODIFIED,\n hdrs.LINK,\n hdrs.LOCATION,\n hdrs.MAX_FORWARDS,\n hdrs.ORIGIN,\n hdrs.PRAGMA,\n hdrs.PROXY_AUTHENTICATE,\n hdrs.PROXY_AUTHORIZATION,\n hdrs.RANGE,\n hdrs.REFERER,\n hdrs.RETRY_AFTER,\n hdrs.SEC_WEBSOCKET_ACCEPT,\n hdrs.SEC_WEBSOCKET_EXTENSIONS,\n hdrs.SEC_WEBSOCKET_KEY,\n hdrs.SEC_WEBSOCKET_KEY1,\n hdrs.SEC_WEBSOCKET_PROTOCOL,\n hdrs.SEC_WEBSOCKET_VERSION,\n hdrs.SERVER,\n hdrs.SET_COOKIE,\n hdrs.TE,\n hdrs.TRAILER,\n hdrs.TRANSFER_ENCODING,\n hdrs.URI,\n hdrs.UPGRADE,\n hdrs.USER_AGENT,\n hdrs.VARY,\n hdrs.VIA,\n hdrs.WWW_AUTHENTICATE,\n hdrs.WANT_DIGEST,\n hdrs.WARNING,\n hdrs.X_FORWARDED_FOR,\n hdrs.X_FORWARDED_HOST,\n hdrs.X_FORWARDED_PROTO,\n)\n
.venv\Lib\site-packages\aiohttp\_headers.pxi
_headers.pxi
Other
2,090
0.95
0
0.02439
python-kit
882
2024-08-28T11:43:55.999139
BSD-3-Clause
false
3839c2818220ae742c6964b673e781ab
#cython: language_level=3\n#\n# Based on https://github.com/MagicStack/httptools\n#\n\nfrom cpython cimport (\n Py_buffer,\n PyBUF_SIMPLE,\n PyBuffer_Release,\n PyBytes_AsString,\n PyBytes_AsStringAndSize,\n PyObject_GetBuffer,\n)\nfrom cpython.mem cimport PyMem_Free, PyMem_Malloc\nfrom libc.limits cimport ULLONG_MAX\nfrom libc.string cimport memcpy\n\nfrom multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy\nfrom yarl import URL as _URL\n\nfrom aiohttp import hdrs\nfrom aiohttp.helpers import DEBUG, set_exception\n\nfrom .http_exceptions import (\n BadHttpMessage,\n BadHttpMethod,\n BadStatusLine,\n ContentLengthError,\n InvalidHeader,\n InvalidURLError,\n LineTooLong,\n PayloadEncodingError,\n TransferEncodingError,\n)\nfrom .http_parser import DeflateBuffer as _DeflateBuffer\nfrom .http_writer import (\n HttpVersion as _HttpVersion,\n HttpVersion10 as _HttpVersion10,\n HttpVersion11 as _HttpVersion11,\n)\nfrom .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader\n\ncimport cython\n\nfrom aiohttp cimport _cparser as cparser\n\ninclude "_headers.pxi"\n\nfrom aiohttp cimport _find_header\n\nALLOWED_UPGRADES = frozenset({"websocket"})\nDEF DEFAULT_FREELIST_SIZE = 250\n\ncdef extern from "Python.h":\n int PyByteArray_Resize(object, Py_ssize_t) except -1\n Py_ssize_t PyByteArray_Size(object) except -1\n char* PyByteArray_AsString(object)\n\n__all__ = ('HttpRequestParser', 'HttpResponseParser',\n 'RawRequestMessage', 'RawResponseMessage')\n\ncdef object URL = _URL\ncdef object URL_build = URL.build\ncdef object CIMultiDict = _CIMultiDict\ncdef object CIMultiDictProxy = _CIMultiDictProxy\ncdef object HttpVersion = _HttpVersion\ncdef object HttpVersion10 = _HttpVersion10\ncdef object HttpVersion11 = _HttpVersion11\ncdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1\ncdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING\ncdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD\ncdef object StreamReader = _StreamReader\ncdef 
object DeflateBuffer = _DeflateBuffer\ncdef bytes EMPTY_BYTES = b""\n\ncdef inline object extend(object buf, const char* at, size_t length):\n cdef Py_ssize_t s\n cdef char* ptr\n s = PyByteArray_Size(buf)\n PyByteArray_Resize(buf, s + length)\n ptr = PyByteArray_AsString(buf)\n memcpy(ptr + s, at, length)\n\n\nDEF METHODS_COUNT = 46;\n\ncdef list _http_method = []\n\nfor i in range(METHODS_COUNT):\n _http_method.append(\n cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))\n\n\ncdef inline str http_method_str(int i):\n if i < METHODS_COUNT:\n return <str>_http_method[i]\n else:\n return "<unknown>"\n\ncdef inline object find_header(bytes raw_header):\n cdef Py_ssize_t size\n cdef char *buf\n cdef int idx\n PyBytes_AsStringAndSize(raw_header, &buf, &size)\n idx = _find_header.find_header(buf, size)\n if idx == -1:\n return raw_header.decode('utf-8', 'surrogateescape')\n return headers[idx]\n\n\n@cython.freelist(DEFAULT_FREELIST_SIZE)\ncdef class RawRequestMessage:\n cdef readonly str method\n cdef readonly str path\n cdef readonly object version # HttpVersion\n cdef readonly object headers # CIMultiDict\n cdef readonly object raw_headers # tuple\n cdef readonly object should_close\n cdef readonly object compression\n cdef readonly object upgrade\n cdef readonly object chunked\n cdef readonly object url # yarl.URL\n\n def __init__(self, method, path, version, headers, raw_headers,\n should_close, compression, upgrade, chunked, url):\n self.method = method\n self.path = path\n self.version = version\n self.headers = headers\n self.raw_headers = raw_headers\n self.should_close = should_close\n self.compression = compression\n self.upgrade = upgrade\n self.chunked = chunked\n self.url = url\n\n def __repr__(self):\n info = []\n info.append(("method", self.method))\n info.append(("path", self.path))\n info.append(("version", self.version))\n info.append(("headers", self.headers))\n info.append(("raw_headers", self.raw_headers))\n 
info.append(("should_close", self.should_close))\n info.append(("compression", self.compression))\n info.append(("upgrade", self.upgrade))\n info.append(("chunked", self.chunked))\n info.append(("url", self.url))\n sinfo = ', '.join(name + '=' + repr(val) for name, val in info)\n return '<RawRequestMessage(' + sinfo + ')>'\n\n def _replace(self, **dct):\n cdef RawRequestMessage ret\n ret = _new_request_message(self.method,\n self.path,\n self.version,\n self.headers,\n self.raw_headers,\n self.should_close,\n self.compression,\n self.upgrade,\n self.chunked,\n self.url)\n if "method" in dct:\n ret.method = dct["method"]\n if "path" in dct:\n ret.path = dct["path"]\n if "version" in dct:\n ret.version = dct["version"]\n if "headers" in dct:\n ret.headers = dct["headers"]\n if "raw_headers" in dct:\n ret.raw_headers = dct["raw_headers"]\n if "should_close" in dct:\n ret.should_close = dct["should_close"]\n if "compression" in dct:\n ret.compression = dct["compression"]\n if "upgrade" in dct:\n ret.upgrade = dct["upgrade"]\n if "chunked" in dct:\n ret.chunked = dct["chunked"]\n if "url" in dct:\n ret.url = dct["url"]\n return ret\n\ncdef _new_request_message(str method,\n str path,\n object version,\n object headers,\n object raw_headers,\n bint should_close,\n object compression,\n bint upgrade,\n bint chunked,\n object url):\n cdef RawRequestMessage ret\n ret = RawRequestMessage.__new__(RawRequestMessage)\n ret.method = method\n ret.path = path\n ret.version = version\n ret.headers = headers\n ret.raw_headers = raw_headers\n ret.should_close = should_close\n ret.compression = compression\n ret.upgrade = upgrade\n ret.chunked = chunked\n ret.url = url\n return ret\n\n\n@cython.freelist(DEFAULT_FREELIST_SIZE)\ncdef class RawResponseMessage:\n cdef readonly object version # HttpVersion\n cdef readonly int code\n cdef readonly str reason\n cdef readonly object headers # CIMultiDict\n cdef readonly object raw_headers # tuple\n cdef readonly object should_close\n cdef 
readonly object compression\n cdef readonly object upgrade\n cdef readonly object chunked\n\n def __init__(self, version, code, reason, headers, raw_headers,\n should_close, compression, upgrade, chunked):\n self.version = version\n self.code = code\n self.reason = reason\n self.headers = headers\n self.raw_headers = raw_headers\n self.should_close = should_close\n self.compression = compression\n self.upgrade = upgrade\n self.chunked = chunked\n\n def __repr__(self):\n info = []\n info.append(("version", self.version))\n info.append(("code", self.code))\n info.append(("reason", self.reason))\n info.append(("headers", self.headers))\n info.append(("raw_headers", self.raw_headers))\n info.append(("should_close", self.should_close))\n info.append(("compression", self.compression))\n info.append(("upgrade", self.upgrade))\n info.append(("chunked", self.chunked))\n sinfo = ', '.join(name + '=' + repr(val) for name, val in info)\n return '<RawResponseMessage(' + sinfo + ')>'\n\n\ncdef _new_response_message(object version,\n int code,\n str reason,\n object headers,\n object raw_headers,\n bint should_close,\n object compression,\n bint upgrade,\n bint chunked):\n cdef RawResponseMessage ret\n ret = RawResponseMessage.__new__(RawResponseMessage)\n ret.version = version\n ret.code = code\n ret.reason = reason\n ret.headers = headers\n ret.raw_headers = raw_headers\n ret.should_close = should_close\n ret.compression = compression\n ret.upgrade = upgrade\n ret.chunked = chunked\n return ret\n\n\n@cython.internal\ncdef class HttpParser:\n\n cdef:\n cparser.llhttp_t* _cparser\n cparser.llhttp_settings_t* _csettings\n\n bytes _raw_name\n object _name\n bytes _raw_value\n bint _has_value\n\n object _protocol\n object _loop\n object _timer\n\n size_t _max_line_size\n size_t _max_field_size\n size_t _max_headers\n bint _response_with_body\n bint _read_until_eof\n\n bint _started\n object _url\n bytearray _buf\n str _path\n str _reason\n list _headers\n list _raw_headers\n bint 
_upgraded\n list _messages\n object _payload\n bint _payload_error\n object _payload_exception\n object _last_error\n bint _auto_decompress\n int _limit\n\n str _content_encoding\n\n Py_buffer py_buf\n\n def __cinit__(self):\n self._cparser = <cparser.llhttp_t*> \\n PyMem_Malloc(sizeof(cparser.llhttp_t))\n if self._cparser is NULL:\n raise MemoryError()\n\n self._csettings = <cparser.llhttp_settings_t*> \\n PyMem_Malloc(sizeof(cparser.llhttp_settings_t))\n if self._csettings is NULL:\n raise MemoryError()\n\n def __dealloc__(self):\n PyMem_Free(self._cparser)\n PyMem_Free(self._csettings)\n\n cdef _init(\n self, cparser.llhttp_type mode,\n object protocol, object loop, int limit,\n object timer=None,\n size_t max_line_size=8190, size_t max_headers=32768,\n size_t max_field_size=8190, payload_exception=None,\n bint response_with_body=True, bint read_until_eof=False,\n bint auto_decompress=True,\n ):\n cparser.llhttp_settings_init(self._csettings)\n cparser.llhttp_init(self._cparser, mode, self._csettings)\n self._cparser.data = <void*>self\n self._cparser.content_length = 0\n\n self._protocol = protocol\n self._loop = loop\n self._timer = timer\n\n self._buf = bytearray()\n self._payload = None\n self._payload_error = 0\n self._payload_exception = payload_exception\n self._messages = []\n\n self._raw_name = EMPTY_BYTES\n self._raw_value = EMPTY_BYTES\n self._has_value = False\n\n self._max_line_size = max_line_size\n self._max_headers = max_headers\n self._max_field_size = max_field_size\n self._response_with_body = response_with_body\n self._read_until_eof = read_until_eof\n self._upgraded = False\n self._auto_decompress = auto_decompress\n self._content_encoding = None\n\n self._csettings.on_url = cb_on_url\n self._csettings.on_status = cb_on_status\n self._csettings.on_header_field = cb_on_header_field\n self._csettings.on_header_value = cb_on_header_value\n self._csettings.on_headers_complete = cb_on_headers_complete\n self._csettings.on_body = cb_on_body\n 
self._csettings.on_message_begin = cb_on_message_begin\n self._csettings.on_message_complete = cb_on_message_complete\n self._csettings.on_chunk_header = cb_on_chunk_header\n self._csettings.on_chunk_complete = cb_on_chunk_complete\n\n self._last_error = None\n self._limit = limit\n\n cdef _process_header(self):\n cdef str value\n if self._raw_name is not EMPTY_BYTES:\n name = find_header(self._raw_name)\n value = self._raw_value.decode('utf-8', 'surrogateescape')\n\n self._headers.append((name, value))\n\n if name is CONTENT_ENCODING:\n self._content_encoding = value\n\n self._has_value = False\n self._raw_headers.append((self._raw_name, self._raw_value))\n self._raw_name = EMPTY_BYTES\n self._raw_value = EMPTY_BYTES\n\n cdef _on_header_field(self, char* at, size_t length):\n if self._has_value:\n self._process_header()\n\n if self._raw_name is EMPTY_BYTES:\n self._raw_name = at[:length]\n else:\n self._raw_name += at[:length]\n\n cdef _on_header_value(self, char* at, size_t length):\n if self._raw_value is EMPTY_BYTES:\n self._raw_value = at[:length]\n else:\n self._raw_value += at[:length]\n self._has_value = True\n\n cdef _on_headers_complete(self):\n self._process_header()\n\n should_close = not cparser.llhttp_should_keep_alive(self._cparser)\n upgrade = self._cparser.upgrade\n chunked = self._cparser.flags & cparser.F_CHUNKED\n\n raw_headers = tuple(self._raw_headers)\n headers = CIMultiDictProxy(CIMultiDict(self._headers))\n\n if self._cparser.type == cparser.HTTP_REQUEST:\n allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES\n if allowed or self._cparser.method == cparser.HTTP_CONNECT:\n self._upgraded = True\n else:\n if upgrade and self._cparser.status_code == 101:\n self._upgraded = True\n\n # do not support old websocket spec\n if SEC_WEBSOCKET_KEY1 in headers:\n raise InvalidHeader(SEC_WEBSOCKET_KEY1)\n\n encoding = None\n enc = self._content_encoding\n if enc is not None:\n self._content_encoding = None\n enc = enc.lower()\n 
if enc in ('gzip', 'deflate', 'br'):\n encoding = enc\n\n if self._cparser.type == cparser.HTTP_REQUEST:\n method = http_method_str(self._cparser.method)\n msg = _new_request_message(\n method, self._path,\n self.http_version(), headers, raw_headers,\n should_close, encoding, upgrade, chunked, self._url)\n else:\n msg = _new_response_message(\n self.http_version(), self._cparser.status_code, self._reason,\n headers, raw_headers, should_close, encoding,\n upgrade, chunked)\n\n if (\n ULLONG_MAX > self._cparser.content_length > 0 or chunked or\n self._cparser.method == cparser.HTTP_CONNECT or\n (self._cparser.status_code >= 199 and\n self._cparser.content_length == 0 and\n self._read_until_eof)\n ):\n payload = StreamReader(\n self._protocol, timer=self._timer, loop=self._loop,\n limit=self._limit)\n else:\n payload = EMPTY_PAYLOAD\n\n self._payload = payload\n if encoding is not None and self._auto_decompress:\n self._payload = DeflateBuffer(payload, encoding)\n\n if not self._response_with_body:\n payload = EMPTY_PAYLOAD\n\n self._messages.append((msg, payload))\n\n cdef _on_message_complete(self):\n self._payload.feed_eof()\n self._payload = None\n\n cdef _on_chunk_header(self):\n self._payload.begin_http_chunk_receiving()\n\n cdef _on_chunk_complete(self):\n self._payload.end_http_chunk_receiving()\n\n cdef object _on_status_complete(self):\n pass\n\n cdef inline http_version(self):\n cdef cparser.llhttp_t* parser = self._cparser\n\n if parser.http_major == 1:\n if parser.http_minor == 0:\n return HttpVersion10\n elif parser.http_minor == 1:\n return HttpVersion11\n\n return HttpVersion(parser.http_major, parser.http_minor)\n\n ### Public API ###\n\n def feed_eof(self):\n cdef bytes desc\n\n if self._payload is not None:\n if self._cparser.flags & cparser.F_CHUNKED:\n raise TransferEncodingError(\n "Not enough data to satisfy transfer length header.")\n elif self._cparser.flags & cparser.F_CONTENT_LENGTH:\n raise ContentLengthError(\n "Not enough data to satisfy 
content length header.")\n elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:\n desc = cparser.llhttp_get_error_reason(self._cparser)\n raise PayloadEncodingError(desc.decode('latin-1'))\n else:\n self._payload.feed_eof()\n elif self._started:\n self._on_headers_complete()\n if self._messages:\n return self._messages[-1][0]\n\n def feed_data(self, data):\n cdef:\n size_t data_len\n size_t nb\n cdef cparser.llhttp_errno_t errno\n\n PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)\n data_len = <size_t>self.py_buf.len\n\n errno = cparser.llhttp_execute(\n self._cparser,\n <char*>self.py_buf.buf,\n data_len)\n\n if errno is cparser.HPE_PAUSED_UPGRADE:\n cparser.llhttp_resume_after_upgrade(self._cparser)\n\n nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf\n\n PyBuffer_Release(&self.py_buf)\n\n if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):\n if self._payload_error == 0:\n if self._last_error is not None:\n ex = self._last_error\n self._last_error = None\n else:\n after = cparser.llhttp_get_error_pos(self._cparser)\n before = data[:after - <char*>self.py_buf.buf]\n after_b = after.split(b"\r\n", 1)[0]\n before = before.rsplit(b"\r\n", 1)[-1]\n data = before + after_b\n pointer = " " * (len(repr(before))-1) + "^"\n ex = parser_error_from_errno(self._cparser, data, pointer)\n self._payload = None\n raise ex\n\n if self._messages:\n messages = self._messages\n self._messages = []\n else:\n messages = ()\n\n if self._upgraded:\n return messages, True, data[nb:]\n else:\n return messages, False, b""\n\n def set_upgraded(self, val):\n self._upgraded = val\n\n\ncdef class HttpRequestParser(HttpParser):\n\n def __init__(\n self, protocol, loop, int limit, timer=None,\n size_t max_line_size=8190, size_t max_headers=32768,\n size_t max_field_size=8190, payload_exception=None,\n bint response_with_body=True, bint read_until_eof=False,\n bint auto_decompress=True,\n ):\n self._init(cparser.HTTP_REQUEST, protocol, loop, limit, 
timer,\n max_line_size, max_headers, max_field_size,\n payload_exception, response_with_body, read_until_eof,\n auto_decompress)\n\n cdef object _on_status_complete(self):\n cdef int idx1, idx2\n if not self._buf:\n return\n self._path = self._buf.decode('utf-8', 'surrogateescape')\n try:\n idx3 = len(self._path)\n if self._cparser.method == cparser.HTTP_CONNECT:\n # authority-form,\n # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3\n self._url = URL.build(authority=self._path, encoded=True)\n elif idx3 > 1 and self._path[0] == '/':\n # origin-form,\n # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1\n idx1 = self._path.find("?")\n if idx1 == -1:\n query = ""\n idx2 = self._path.find("#")\n if idx2 == -1:\n path = self._path\n fragment = ""\n else:\n path = self._path[0: idx2]\n fragment = self._path[idx2+1:]\n\n else:\n path = self._path[0:idx1]\n idx1 += 1\n idx2 = self._path.find("#", idx1+1)\n if idx2 == -1:\n query = self._path[idx1:]\n fragment = ""\n else:\n query = self._path[idx1: idx2]\n fragment = self._path[idx2+1:]\n\n self._url = URL.build(\n path=path,\n query_string=query,\n fragment=fragment,\n encoded=True,\n )\n else:\n # absolute-form for proxy maybe,\n # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2\n self._url = URL(self._path, encoded=True)\n finally:\n PyByteArray_Resize(self._buf, 0)\n\n\ncdef class HttpResponseParser(HttpParser):\n\n def __init__(\n self, protocol, loop, int limit, timer=None,\n size_t max_line_size=8190, size_t max_headers=32768,\n size_t max_field_size=8190, payload_exception=None,\n bint response_with_body=True, bint read_until_eof=False,\n bint auto_decompress=True\n ):\n self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,\n max_line_size, max_headers, max_field_size,\n payload_exception, response_with_body, read_until_eof,\n auto_decompress)\n # Use strict parsing on dev mode, so users are warned about broken servers.\n if not DEBUG:\n 
cparser.llhttp_set_lenient_headers(self._cparser, 1)\n cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)\n cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)\n\n cdef object _on_status_complete(self):\n if self._buf:\n self._reason = self._buf.decode('utf-8', 'surrogateescape')\n PyByteArray_Resize(self._buf, 0)\n else:\n self._reason = self._reason or ''\n\ncdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n\n pyparser._started = True\n pyparser._headers = []\n pyparser._raw_headers = []\n PyByteArray_Resize(pyparser._buf, 0)\n pyparser._path = None\n pyparser._reason = None\n return 0\n\n\ncdef int cb_on_url(cparser.llhttp_t* parser,\n const char *at, size_t length) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n try:\n if length > pyparser._max_line_size:\n raise LineTooLong(\n 'Status line is too long', pyparser._max_line_size, length)\n extend(pyparser._buf, at, length)\n except BaseException as ex:\n pyparser._last_error = ex\n return -1\n else:\n return 0\n\n\ncdef int cb_on_status(cparser.llhttp_t* parser,\n const char *at, size_t length) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n cdef str reason\n try:\n if length > pyparser._max_line_size:\n raise LineTooLong(\n 'Status line is too long', pyparser._max_line_size, length)\n extend(pyparser._buf, at, length)\n except BaseException as ex:\n pyparser._last_error = ex\n return -1\n else:\n return 0\n\n\ncdef int cb_on_header_field(cparser.llhttp_t* parser,\n const char *at, size_t length) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n cdef Py_ssize_t size\n try:\n pyparser._on_status_complete()\n size = len(pyparser._raw_name) + length\n if size > pyparser._max_field_size:\n raise LineTooLong(\n 'Header name is too long', pyparser._max_field_size, size)\n pyparser._on_header_field(at, length)\n except BaseException as ex:\n pyparser._last_error = 
ex\n return -1\n else:\n return 0\n\n\ncdef int cb_on_header_value(cparser.llhttp_t* parser,\n const char *at, size_t length) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n cdef Py_ssize_t size\n try:\n size = len(pyparser._raw_value) + length\n if size > pyparser._max_field_size:\n raise LineTooLong(\n 'Header value is too long', pyparser._max_field_size, size)\n pyparser._on_header_value(at, length)\n except BaseException as ex:\n pyparser._last_error = ex\n return -1\n else:\n return 0\n\n\ncdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n try:\n pyparser._on_status_complete()\n pyparser._on_headers_complete()\n except BaseException as exc:\n pyparser._last_error = exc\n return -1\n else:\n if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:\n return 2\n else:\n return 0\n\n\ncdef int cb_on_body(cparser.llhttp_t* parser,\n const char *at, size_t length) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n cdef bytes body = at[:length]\n try:\n pyparser._payload.feed_data(body, length)\n except BaseException as underlying_exc:\n reraised_exc = underlying_exc\n if pyparser._payload_exception is not None:\n reraised_exc = pyparser._payload_exception(str(underlying_exc))\n\n set_exception(pyparser._payload, reraised_exc, underlying_exc)\n\n pyparser._payload_error = 1\n return -1\n else:\n return 0\n\n\ncdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n try:\n pyparser._started = False\n pyparser._on_message_complete()\n except BaseException as exc:\n pyparser._last_error = exc\n return -1\n else:\n return 0\n\n\ncdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n try:\n pyparser._on_chunk_header()\n except BaseException as exc:\n pyparser._last_error = exc\n return -1\n else:\n return 0\n\n\ncdef int 
cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:\n cdef HttpParser pyparser = <HttpParser>parser.data\n try:\n pyparser._on_chunk_complete()\n except BaseException as exc:\n pyparser._last_error = exc\n return -1\n else:\n return 0\n\n\ncdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):\n cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)\n cdef bytes desc = cparser.llhttp_get_error_reason(parser)\n\n err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)\n\n if errno in {cparser.HPE_CB_MESSAGE_BEGIN,\n cparser.HPE_CB_HEADERS_COMPLETE,\n cparser.HPE_CB_MESSAGE_COMPLETE,\n cparser.HPE_CB_CHUNK_HEADER,\n cparser.HPE_CB_CHUNK_COMPLETE,\n cparser.HPE_INVALID_CONSTANT,\n cparser.HPE_INVALID_HEADER_TOKEN,\n cparser.HPE_INVALID_CONTENT_LENGTH,\n cparser.HPE_INVALID_CHUNK_SIZE,\n cparser.HPE_INVALID_EOF_STATE,\n cparser.HPE_INVALID_TRANSFER_ENCODING}:\n return BadHttpMessage(err_msg)\n elif errno == cparser.HPE_INVALID_METHOD:\n return BadHttpMethod(error=err_msg)\n elif errno in {cparser.HPE_INVALID_STATUS,\n cparser.HPE_INVALID_VERSION}:\n return BadStatusLine(error=err_msg)\n elif errno == cparser.HPE_INVALID_URL:\n return InvalidURLError(err_msg)\n\n return BadHttpMessage(err_msg)\n
.venv\Lib\site-packages\aiohttp\_http_parser.pyx
_http_parser.pyx
Other
29,076
0.95
0.101553
0.018284
awesome-app
791
2025-06-05T19:49:41.769865
MIT
false
aeba93f1d22d8b2f3d5e8410888be60c
MZ
.venv\Lib\site-packages\aiohttp\_http_writer.cp313-win_amd64.pyd
_http_writer.cp313-win_amd64.pyd
Other
46,080
0.95
0.031034
0.00692
python-kit
540
2024-02-20T09:24:40.636244
MIT
false
81f660d6d767cbc1a3acd63054b12bd1
from cpython.bytes cimport PyBytes_FromStringAndSize\nfrom cpython.exc cimport PyErr_NoMemory\nfrom cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc\nfrom cpython.object cimport PyObject_Str\nfrom libc.stdint cimport uint8_t, uint64_t\nfrom libc.string cimport memcpy\n\nfrom multidict import istr\n\nDEF BUF_SIZE = 16 * 1024 # 16KiB\ncdef char BUFFER[BUF_SIZE]\n\ncdef object _istr = istr\n\n\n# ----------------- writer ---------------------------\n\ncdef struct Writer:\n char *buf\n Py_ssize_t size\n Py_ssize_t pos\n\n\ncdef inline void _init_writer(Writer* writer):\n writer.buf = &BUFFER[0]\n writer.size = BUF_SIZE\n writer.pos = 0\n\n\ncdef inline void _release_writer(Writer* writer):\n if writer.buf != BUFFER:\n PyMem_Free(writer.buf)\n\n\ncdef inline int _write_byte(Writer* writer, uint8_t ch):\n cdef char * buf\n cdef Py_ssize_t size\n\n if writer.pos == writer.size:\n # reallocate\n size = writer.size + BUF_SIZE\n if writer.buf == BUFFER:\n buf = <char*>PyMem_Malloc(size)\n if buf == NULL:\n PyErr_NoMemory()\n return -1\n memcpy(buf, writer.buf, writer.size)\n else:\n buf = <char*>PyMem_Realloc(writer.buf, size)\n if buf == NULL:\n PyErr_NoMemory()\n return -1\n writer.buf = buf\n writer.size = size\n writer.buf[writer.pos] = <char>ch\n writer.pos += 1\n return 0\n\n\ncdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):\n cdef uint64_t utf = <uint64_t> symbol\n\n if utf < 0x80:\n return _write_byte(writer, <uint8_t>utf)\n elif utf < 0x800:\n if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:\n return -1\n return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))\n elif 0xD800 <= utf <= 0xDFFF:\n # surogate pair, ignored\n return 0\n elif utf < 0x10000:\n if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:\n return -1\n if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:\n return -1\n return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))\n elif utf > 0x10FFFF:\n # symbol is too large\n return 0\n else:\n if 
_write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:\n return -1\n if _write_byte(writer,\n <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:\n return -1\n if _write_byte(writer,\n <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:\n return -1\n return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))\n\n\ncdef inline int _write_str(Writer* writer, str s):\n cdef Py_UCS4 ch\n for ch in s:\n if _write_utf8(writer, ch) < 0:\n return -1\n\n\ncdef inline int _write_str_raise_on_nlcr(Writer* writer, object s):\n cdef Py_UCS4 ch\n cdef str out_str\n if type(s) is str:\n out_str = <str>s\n elif type(s) is _istr:\n out_str = PyObject_Str(s)\n elif not isinstance(s, str):\n raise TypeError("Cannot serialize non-str key {!r}".format(s))\n else:\n out_str = str(s)\n\n for ch in out_str:\n if ch == 0x0D or ch == 0x0A:\n raise ValueError(\n "Newline or carriage return detected in headers. "\n "Potential header injection attack."\n )\n if _write_utf8(writer, ch) < 0:\n return -1\n\n\n# --------------- _serialize_headers ----------------------\n\ndef _serialize_headers(str status_line, headers):\n cdef Writer writer\n cdef object key\n cdef object val\n\n _init_writer(&writer)\n\n try:\n if _write_str(&writer, status_line) < 0:\n raise\n if _write_byte(&writer, b'\r') < 0:\n raise\n if _write_byte(&writer, b'\n') < 0:\n raise\n\n for key, val in headers.items():\n if _write_str_raise_on_nlcr(&writer, key) < 0:\n raise\n if _write_byte(&writer, b':') < 0:\n raise\n if _write_byte(&writer, b' ') < 0:\n raise\n if _write_str_raise_on_nlcr(&writer, val) < 0:\n raise\n if _write_byte(&writer, b'\r') < 0:\n raise\n if _write_byte(&writer, b'\n') < 0:\n raise\n\n if _write_byte(&writer, b'\r') < 0:\n raise\n if _write_byte(&writer, b'\n') < 0:\n raise\n\n return PyBytes_FromStringAndSize(writer.buf, writer.pos)\n finally:\n _release_writer(&writer)\n
.venv\Lib\site-packages\aiohttp\_http_writer.pyx
_http_writer.pyx
Other
4,721
0.95
0.2
0.038168
python-kit
495
2025-02-23T01:13:29.037293
MIT
false
6e8671173995c0fdb8b6e0eb76c3b6ef
__version__ = "3.12.13"\n\nfrom typing import TYPE_CHECKING, Tuple\n\nfrom . import hdrs as hdrs\nfrom .client import (\n BaseConnector,\n ClientConnectionError,\n ClientConnectionResetError,\n ClientConnectorCertificateError,\n ClientConnectorDNSError,\n ClientConnectorError,\n ClientConnectorSSLError,\n ClientError,\n ClientHttpProxyError,\n ClientOSError,\n ClientPayloadError,\n ClientProxyConnectionError,\n ClientRequest,\n ClientResponse,\n ClientResponseError,\n ClientSession,\n ClientSSLError,\n ClientTimeout,\n ClientWebSocketResponse,\n ClientWSTimeout,\n ConnectionTimeoutError,\n ContentTypeError,\n Fingerprint,\n InvalidURL,\n InvalidUrlClientError,\n InvalidUrlRedirectClientError,\n NamedPipeConnector,\n NonHttpUrlClientError,\n NonHttpUrlRedirectClientError,\n RedirectClientError,\n RequestInfo,\n ServerConnectionError,\n ServerDisconnectedError,\n ServerFingerprintMismatch,\n ServerTimeoutError,\n SocketTimeoutError,\n TCPConnector,\n TooManyRedirects,\n UnixConnector,\n WSMessageTypeError,\n WSServerHandshakeError,\n request,\n)\nfrom .client_middleware_digest_auth import DigestAuthMiddleware\nfrom .client_middlewares import ClientHandlerType, ClientMiddlewareType\nfrom .compression_utils import set_zlib_backend\nfrom .connector import (\n AddrInfoType as AddrInfoType,\n SocketFactoryType as SocketFactoryType,\n)\nfrom .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar\nfrom .formdata import FormData as FormData\nfrom .helpers import BasicAuth, ChainMapProxy, ETag\nfrom .http import (\n HttpVersion as HttpVersion,\n HttpVersion10 as HttpVersion10,\n HttpVersion11 as HttpVersion11,\n WebSocketError as WebSocketError,\n WSCloseCode as WSCloseCode,\n WSMessage as WSMessage,\n WSMsgType as WSMsgType,\n)\nfrom .multipart import (\n BadContentDispositionHeader as BadContentDispositionHeader,\n BadContentDispositionParam as BadContentDispositionParam,\n BodyPartReader as BodyPartReader,\n MultipartReader as MultipartReader,\n 
MultipartWriter as MultipartWriter,\n content_disposition_filename as content_disposition_filename,\n parse_content_disposition as parse_content_disposition,\n)\nfrom .payload import (\n PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,\n AsyncIterablePayload as AsyncIterablePayload,\n BufferedReaderPayload as BufferedReaderPayload,\n BytesIOPayload as BytesIOPayload,\n BytesPayload as BytesPayload,\n IOBasePayload as IOBasePayload,\n JsonPayload as JsonPayload,\n Payload as Payload,\n StringIOPayload as StringIOPayload,\n StringPayload as StringPayload,\n TextIOPayload as TextIOPayload,\n get_payload as get_payload,\n payload_type as payload_type,\n)\nfrom .payload_streamer import streamer as streamer\nfrom .resolver import (\n AsyncResolver as AsyncResolver,\n DefaultResolver as DefaultResolver,\n ThreadedResolver as ThreadedResolver,\n)\nfrom .streams import (\n EMPTY_PAYLOAD as EMPTY_PAYLOAD,\n DataQueue as DataQueue,\n EofStream as EofStream,\n FlowControlDataQueue as FlowControlDataQueue,\n StreamReader as StreamReader,\n)\nfrom .tracing import (\n TraceConfig as TraceConfig,\n TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,\n TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,\n TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,\n TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,\n TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,\n TraceDnsCacheHitParams as TraceDnsCacheHitParams,\n TraceDnsCacheMissParams as TraceDnsCacheMissParams,\n TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,\n TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,\n TraceRequestChunkSentParams as TraceRequestChunkSentParams,\n TraceRequestEndParams as TraceRequestEndParams,\n TraceRequestExceptionParams as TraceRequestExceptionParams,\n TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,\n TraceRequestRedirectParams as TraceRequestRedirectParams,\n TraceRequestStartParams 
as TraceRequestStartParams,\n TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,\n)\n\nif TYPE_CHECKING:\n # At runtime these are lazy-loaded at the bottom of the file.\n from .worker import (\n GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,\n GunicornWebWorker as GunicornWebWorker,\n )\n\n__all__: Tuple[str, ...] = (\n "hdrs",\n # client\n "AddrInfoType",\n "BaseConnector",\n "ClientConnectionError",\n "ClientConnectionResetError",\n "ClientConnectorCertificateError",\n "ClientConnectorDNSError",\n "ClientConnectorError",\n "ClientConnectorSSLError",\n "ClientError",\n "ClientHttpProxyError",\n "ClientOSError",\n "ClientPayloadError",\n "ClientProxyConnectionError",\n "ClientResponse",\n "ClientRequest",\n "ClientResponseError",\n "ClientSSLError",\n "ClientSession",\n "ClientTimeout",\n "ClientWebSocketResponse",\n "ClientWSTimeout",\n "ConnectionTimeoutError",\n "ContentTypeError",\n "Fingerprint",\n "FlowControlDataQueue",\n "InvalidURL",\n "InvalidUrlClientError",\n "InvalidUrlRedirectClientError",\n "NonHttpUrlClientError",\n "NonHttpUrlRedirectClientError",\n "RedirectClientError",\n "RequestInfo",\n "ServerConnectionError",\n "ServerDisconnectedError",\n "ServerFingerprintMismatch",\n "ServerTimeoutError",\n "SocketFactoryType",\n "SocketTimeoutError",\n "TCPConnector",\n "TooManyRedirects",\n "UnixConnector",\n "NamedPipeConnector",\n "WSServerHandshakeError",\n "request",\n # client_middleware\n "ClientMiddlewareType",\n "ClientHandlerType",\n # cookiejar\n "CookieJar",\n "DummyCookieJar",\n # formdata\n "FormData",\n # helpers\n "BasicAuth",\n "ChainMapProxy",\n "DigestAuthMiddleware",\n "ETag",\n "set_zlib_backend",\n # http\n "HttpVersion",\n "HttpVersion10",\n "HttpVersion11",\n "WSMsgType",\n "WSCloseCode",\n "WSMessage",\n "WebSocketError",\n # multipart\n "BadContentDispositionHeader",\n "BadContentDispositionParam",\n "BodyPartReader",\n "MultipartReader",\n "MultipartWriter",\n "content_disposition_filename",\n 
"parse_content_disposition",\n # payload\n "AsyncIterablePayload",\n "BufferedReaderPayload",\n "BytesIOPayload",\n "BytesPayload",\n "IOBasePayload",\n "JsonPayload",\n "PAYLOAD_REGISTRY",\n "Payload",\n "StringIOPayload",\n "StringPayload",\n "TextIOPayload",\n "get_payload",\n "payload_type",\n # payload_streamer\n "streamer",\n # resolver\n "AsyncResolver",\n "DefaultResolver",\n "ThreadedResolver",\n # streams\n "DataQueue",\n "EMPTY_PAYLOAD",\n "EofStream",\n "StreamReader",\n # tracing\n "TraceConfig",\n "TraceConnectionCreateEndParams",\n "TraceConnectionCreateStartParams",\n "TraceConnectionQueuedEndParams",\n "TraceConnectionQueuedStartParams",\n "TraceConnectionReuseconnParams",\n "TraceDnsCacheHitParams",\n "TraceDnsCacheMissParams",\n "TraceDnsResolveHostEndParams",\n "TraceDnsResolveHostStartParams",\n "TraceRequestChunkSentParams",\n "TraceRequestEndParams",\n "TraceRequestExceptionParams",\n "TraceRequestHeadersSentParams",\n "TraceRequestRedirectParams",\n "TraceRequestStartParams",\n "TraceResponseChunkReceivedParams",\n # workers (imported lazily with __getattr__)\n "GunicornUVLoopWebWorker",\n "GunicornWebWorker",\n "WSMessageTypeError",\n)\n\n\ndef __dir__() -> Tuple[str, ...]:\n return __all__ + ("__doc__",)\n\n\ndef __getattr__(name: str) -> object:\n global GunicornUVLoopWebWorker, GunicornWebWorker\n\n # Importing gunicorn takes a long time (>100ms), so only import if actually needed.\n if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):\n try:\n from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw\n except ImportError:\n return None\n\n GunicornUVLoopWebWorker = guv # type: ignore[misc]\n GunicornWebWorker = gw # type: ignore[misc]\n return guv if name == "GunicornUVLoopWebWorker" else gw\n\n raise AttributeError(f"module {__name__} has no attribute {name}")\n
.venv\Lib\site-packages\aiohttp\__init__.py
__init__.py
Python
8,581
0.95
0.02518
0.05618
awesome-app
194
2025-02-10T16:53:53.581930
BSD-3-Clause
false
f5d752b5b4c52ddc20ced1cf29cf39f0
ee1b6686067213d1ea59b3e9c47534afb90021d4f692939741ad4069d0e1d96f
.venv\Lib\site-packages\aiohttp\.hash\hdrs.py.hash
hdrs.py.hash
Other
64
0.5
0.1
0
awesome-app
275
2025-06-21T03:46:29.742879
GPL-3.0
false
b57ae654fb5219bfcfd3e88bec7c7b8b
18fd18f4da996101a426d4bcd570f353bd1eeeb44c6f7e1347bc86326c79ff3b
.venv\Lib\site-packages\aiohttp\.hash\_cparser.pxd.hash
_cparser.pxd.hash
Other
64
0.5
0.1
0
react-lib
303
2023-09-13T00:03:56.999751
MIT
false
327ddf7611fd427e073192b8b3c77627
0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73
.venv\Lib\site-packages\aiohttp\.hash\_find_header.pxd.hash
_find_header.pxd.hash
Other
64
0.5
0.1
0
node-utils
659
2023-08-04T09:34:49.618237
MIT
false
87539e88d7dac4c10e781ff0c9839198
7584e6ccbd1472c5e86982c462e434a0eea469888a4e1baea595970c1e997520
.venv\Lib\site-packages\aiohttp\.hash\_http_parser.pyx.hash
_http_parser.pyx.hash
Other
64
0.5
0.1
0
python-kit
273
2025-06-05T14:32:06.965579
GPL-3.0
false
363355e94fb443adc25c3e259cf98be3
c3ad073fa4d540a9abb3f9c79bc16548d622457d04068ec7caaf62994983363a
.venv\Lib\site-packages\aiohttp\.hash\_http_writer.pyx.hash
_http_writer.pyx.hash
Other
64
0.5
0.1
0
node-utils
70
2025-06-27T14:02:37.406627
MIT
false
aa8970c2f7c7bdb56292c80eef4a9db4
"""Helpers for WebSocket protocol versions 13 and 8."""\n\nimport functools\nimport re\nfrom struct import Struct\nfrom typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple\n\nfrom ..helpers import NO_EXTENSIONS\nfrom .models import WSHandshakeError\n\nUNPACK_LEN3 = Struct("!Q").unpack_from\nUNPACK_CLOSE_CODE = Struct("!H").unpack\nPACK_LEN1 = Struct("!BB").pack\nPACK_LEN2 = Struct("!BBH").pack\nPACK_LEN3 = Struct("!BBQ").pack\nPACK_CLOSE_CODE = Struct("!H").pack\nPACK_RANDBITS = Struct("!L").pack\nMSG_SIZE: Final[int] = 2**14\nMASK_LEN: Final[int] = 4\n\nWS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"\n\n\n# Used by _websocket_mask_python\n@functools.lru_cache\ndef _xor_table() -> List[bytes]:\n return [bytes(a ^ b for a in range(256)) for b in range(256)]\n\n\ndef _websocket_mask_python(mask: bytes, data: bytearray) -> None:\n """Websocket masking function.\n\n `mask` is a `bytes` object of length 4; `data` is a `bytearray`\n object of any length. The contents of `data` are masked with `mask`,\n as specified in section 5.3 of RFC 6455.\n\n Note that this function mutates the `data` argument.\n\n This pure-python implementation may be replaced by an optimized\n version when available.\n\n """\n assert isinstance(data, bytearray), data\n assert len(mask) == 4, mask\n\n if data:\n _XOR_TABLE = _xor_table()\n a, b, c, d = (_XOR_TABLE[n] for n in mask)\n data[::4] = data[::4].translate(a)\n data[1::4] = data[1::4].translate(b)\n data[2::4] = data[2::4].translate(c)\n data[3::4] = data[3::4].translate(d)\n\n\nif TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover\n websocket_mask = _websocket_mask_python\nelse:\n try:\n from .mask import _websocket_mask_cython # type: ignore[import-not-found]\n\n websocket_mask = _websocket_mask_cython\n except ImportError: # pragma: no cover\n websocket_mask = _websocket_mask_python\n\n\n_WS_EXT_RE: Final[Pattern[str]] = re.compile(\n r"^(?:;\s*(?:"\n r"(server_no_context_takeover)|"\n 
r"(client_no_context_takeover)|"\n r"(server_max_window_bits(?:=(\d+))?)|"\n r"(client_max_window_bits(?:=(\d+))?)))*$"\n)\n\n_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")\n\n\ndef ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:\n if not extstr:\n return 0, False\n\n compress = 0\n notakeover = False\n for ext in _WS_EXT_RE_SPLIT.finditer(extstr):\n defext = ext.group(1)\n # Return compress = 15 when get `permessage-deflate`\n if not defext:\n compress = 15\n break\n match = _WS_EXT_RE.match(defext)\n if match:\n compress = 15\n if isserver:\n # Server never fail to detect compress handshake.\n # Server does not need to send max wbit to client\n if match.group(4):\n compress = int(match.group(4))\n # Group3 must match if group4 matches\n # Compress wbit 8 does not support in zlib\n # If compress level not support,\n # CONTINUE to next extension\n if compress > 15 or compress < 9:\n compress = 0\n continue\n if match.group(1):\n notakeover = True\n # Ignore regex group 5 & 6 for client_max_window_bits\n break\n else:\n if match.group(6):\n compress = int(match.group(6))\n # Group5 must match if group6 matches\n # Compress wbit 8 does not support in zlib\n # If compress level not support,\n # FAIL the parse progress\n if compress > 15 or compress < 9:\n raise WSHandshakeError("Invalid window size")\n if match.group(2):\n notakeover = True\n # Ignore regex group 5 & 6 for client_max_window_bits\n break\n # Return Fail if client side and not match\n elif not isserver:\n raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))\n\n return compress, notakeover\n\n\ndef ws_ext_gen(\n compress: int = 15, isserver: bool = False, server_notakeover: bool = False\n) -> str:\n # client_notakeover=False not used for server\n # compress wbit 8 does not support in zlib\n if compress < 9 or compress > 15:\n raise ValueError(\n "Compress wbits must between 9 and 15, zlib does not support 
wbits=8"\n )\n enabledext = ["permessage-deflate"]\n if not isserver:\n enabledext.append("client_max_window_bits")\n\n if compress < 15:\n enabledext.append("server_max_window_bits=" + str(compress))\n if server_notakeover:\n enabledext.append("server_no_context_takeover")\n # if client_notakeover:\n # enabledext.append('client_no_context_takeover')\n return "; ".join(enabledext)\n
.venv\Lib\site-packages\aiohttp\_websocket\helpers.py
helpers.py
Python
5,185
0.95
0.244898
0.157025
react-lib
718
2025-04-12T03:07:09.559109
GPL-3.0
false
8dc4de75b2846a795451b0e8321bdfee
MZ
.venv\Lib\site-packages\aiohttp\_websocket\mask.cp313-win_amd64.pyd
mask.cp313-win_amd64.pyd
Other
35,328
0.95
0.038136
0.017167
python-kit
244
2024-12-05T22:25:32.248365
Apache-2.0
false
eb392fd8eaca606978a7605b4e9aecb0
"""Cython declarations for websocket masking."""\n\ncpdef void _websocket_mask_cython(bytes mask, bytearray data)\n
.venv\Lib\site-packages\aiohttp\_websocket\mask.pxd
mask.pxd
Other
115
0.85
0.333333
0
awesome-app
302
2024-11-22T17:14:05.973094
MIT
false
16efd3c587b671ee9b3b3db269cf02b1
from cpython cimport PyBytes_AsString\n\n\n#from cpython cimport PyByteArray_AsString # cython still not exports that\ncdef extern from "Python.h":\n char* PyByteArray_AsString(bytearray ba) except NULL\n\nfrom libc.stdint cimport uint32_t, uint64_t, uintmax_t\n\n\ncpdef void _websocket_mask_cython(bytes mask, bytearray data):\n """Note, this function mutates its `data` argument\n """\n cdef:\n Py_ssize_t data_len, i\n # bit operations on signed integers are implementation-specific\n unsigned char * in_buf\n const unsigned char * mask_buf\n uint32_t uint32_msk\n uint64_t uint64_msk\n\n assert len(mask) == 4\n\n data_len = len(data)\n in_buf = <unsigned char*>PyByteArray_AsString(data)\n mask_buf = <const unsigned char*>PyBytes_AsString(mask)\n uint32_msk = (<uint32_t*>mask_buf)[0]\n\n # TODO: align in_data ptr to achieve even faster speeds\n # does it need in python ?! malloc() always aligns to sizeof(long) bytes\n\n if sizeof(size_t) >= 8:\n uint64_msk = uint32_msk\n uint64_msk = (uint64_msk << 32) | uint32_msk\n\n while data_len >= 8:\n (<uint64_t*>in_buf)[0] ^= uint64_msk\n in_buf += 8\n data_len -= 8\n\n\n while data_len >= 4:\n (<uint32_t*>in_buf)[0] ^= uint32_msk\n in_buf += 4\n data_len -= 4\n\n for i in range(0, data_len):\n in_buf[i] ^= mask_buf[i]\n
.venv\Lib\site-packages\aiohttp\_websocket\mask.pyx
mask.pyx
Other
1,445
0.95
0.104167
0.114286
python-kit
641
2023-11-07T03:24:21.975732
MIT
false
dd93ce24eae4db750eaf76a1151c568a
"""Models for WebSocket protocol versions 13 and 8."""\n\nimport json\nfrom enum import IntEnum\nfrom typing import Any, Callable, Final, NamedTuple, Optional, cast\n\nWS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])\n\n\nclass WSCloseCode(IntEnum):\n OK = 1000\n GOING_AWAY = 1001\n PROTOCOL_ERROR = 1002\n UNSUPPORTED_DATA = 1003\n ABNORMAL_CLOSURE = 1006\n INVALID_TEXT = 1007\n POLICY_VIOLATION = 1008\n MESSAGE_TOO_BIG = 1009\n MANDATORY_EXTENSION = 1010\n INTERNAL_ERROR = 1011\n SERVICE_RESTART = 1012\n TRY_AGAIN_LATER = 1013\n BAD_GATEWAY = 1014\n\n\nclass WSMsgType(IntEnum):\n # websocket spec types\n CONTINUATION = 0x0\n TEXT = 0x1\n BINARY = 0x2\n PING = 0x9\n PONG = 0xA\n CLOSE = 0x8\n\n # aiohttp specific types\n CLOSING = 0x100\n CLOSED = 0x101\n ERROR = 0x102\n\n text = TEXT\n binary = BINARY\n ping = PING\n pong = PONG\n close = CLOSE\n closing = CLOSING\n closed = CLOSED\n error = ERROR\n\n\nclass WSMessage(NamedTuple):\n type: WSMsgType\n # To type correctly, this would need some kind of tagged union for each type.\n data: Any\n extra: Optional[str]\n\n def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:\n """Return parsed JSON data.\n\n .. 
versionadded:: 0.22\n """\n return loads(self.data)\n\n\n# Constructing the tuple directly to avoid the overhead of\n# the lambda and arg processing since NamedTuples are constructed\n# with a run time built lambda\n# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441\nWS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None))\nWS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None))\n\n\nclass WebSocketError(Exception):\n """WebSocket protocol parser error."""\n\n def __init__(self, code: int, message: str) -> None:\n self.code = code\n super().__init__(code, message)\n\n def __str__(self) -> str:\n return cast(str, self.args[1])\n\n\nclass WSHandshakeError(Exception):\n """WebSocket protocol handshake error."""\n
.venv\Lib\site-packages\aiohttp\_websocket\models.py
models.py
Python
2,205
0.95
0.119048
0.109375
react-lib
494
2024-10-30T04:27:25.239233
GPL-3.0
false
d862cba5c549209d198bd6f2935a1056
"""Reader for WebSocket protocol versions 13 and 8."""\n\nfrom typing import TYPE_CHECKING\n\nfrom ..helpers import NO_EXTENSIONS\n\nif TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover\n from .reader_py import (\n WebSocketDataQueue as WebSocketDataQueuePython,\n WebSocketReader as WebSocketReaderPython,\n )\n\n WebSocketReader = WebSocketReaderPython\n WebSocketDataQueue = WebSocketDataQueuePython\nelse:\n try:\n from .reader_c import ( # type: ignore[import-not-found]\n WebSocketDataQueue as WebSocketDataQueueCython,\n WebSocketReader as WebSocketReaderCython,\n )\n\n WebSocketReader = WebSocketReaderCython\n WebSocketDataQueue = WebSocketDataQueueCython\n except ImportError: # pragma: no cover\n from .reader_py import (\n WebSocketDataQueue as WebSocketDataQueuePython,\n WebSocketReader as WebSocketReaderPython,\n )\n\n WebSocketReader = WebSocketReaderPython\n WebSocketDataQueue = WebSocketDataQueuePython\n
.venv\Lib\site-packages\aiohttp\_websocket\reader.py
reader.py
Python
1,061
0.95
0.096774
0
node-utils
128
2023-12-08T12:07:16.325624
GPL-3.0
false
b8025a77c597525a736637de4dd178ec
import cython\n\nfrom .mask cimport _websocket_mask_cython as websocket_mask\n\n\ncdef unsigned int READ_HEADER\ncdef unsigned int READ_PAYLOAD_LENGTH\ncdef unsigned int READ_PAYLOAD_MASK\ncdef unsigned int READ_PAYLOAD\n\ncdef int OP_CODE_NOT_SET\ncdef int OP_CODE_CONTINUATION\ncdef int OP_CODE_TEXT\ncdef int OP_CODE_BINARY\ncdef int OP_CODE_CLOSE\ncdef int OP_CODE_PING\ncdef int OP_CODE_PONG\n\ncdef int COMPRESSED_NOT_SET\ncdef int COMPRESSED_FALSE\ncdef int COMPRESSED_TRUE\n\ncdef object UNPACK_LEN3\ncdef object UNPACK_CLOSE_CODE\ncdef object TUPLE_NEW\n\ncdef object WSMsgType\ncdef object WSMessage\n\ncdef object WS_MSG_TYPE_TEXT\ncdef object WS_MSG_TYPE_BINARY\n\ncdef set ALLOWED_CLOSE_CODES\ncdef set MESSAGE_TYPES_WITH_CONTENT\n\ncdef tuple EMPTY_FRAME\ncdef tuple EMPTY_FRAME_ERROR\n\ncdef class WebSocketDataQueue:\n\n cdef unsigned int _size\n cdef public object _protocol\n cdef unsigned int _limit\n cdef object _loop\n cdef bint _eof\n cdef object _waiter\n cdef object _exception\n cdef public object _buffer\n cdef object _get_buffer\n cdef object _put_buffer\n\n cdef void _release_waiter(self)\n\n cpdef void feed_data(self, object data, unsigned int size)\n\n @cython.locals(size="unsigned int")\n cdef _read_from_buffer(self)\n\ncdef class WebSocketReader:\n\n cdef WebSocketDataQueue queue\n cdef unsigned int _max_msg_size\n\n cdef Exception _exc\n cdef bytearray _partial\n cdef unsigned int _state\n\n cdef int _opcode\n cdef bint _frame_fin\n cdef int _frame_opcode\n cdef list _payload_fragments\n cdef Py_ssize_t _frame_payload_len\n\n cdef bytes _tail\n cdef bint _has_mask\n cdef bytes _frame_mask\n cdef Py_ssize_t _payload_bytes_to_read\n cdef unsigned int _payload_len_flag\n cdef int _compressed\n cdef object _decompressobj\n cdef bint _compress\n\n cpdef tuple feed_data(self, object data)\n\n @cython.locals(\n is_continuation=bint,\n fin=bint,\n has_partial=bint,\n payload_merged=bytes,\n )\n cpdef void _handle_frame(self, bint fin, int opcode, object 
payload, int compressed) except *\n\n @cython.locals(\n start_pos=Py_ssize_t,\n data_len=Py_ssize_t,\n length=Py_ssize_t,\n chunk_size=Py_ssize_t,\n chunk_len=Py_ssize_t,\n data_len=Py_ssize_t,\n data_cstr="const unsigned char *",\n first_byte="unsigned char",\n second_byte="unsigned char",\n f_start_pos=Py_ssize_t,\n f_end_pos=Py_ssize_t,\n has_mask=bint,\n fin=bint,\n had_fragments=Py_ssize_t,\n payload_bytearray=bytearray,\n )\n cpdef void _feed_data(self, bytes data) except *\n
.venv\Lib\site-packages\aiohttp\_websocket\reader_c.pxd
reader_c.pxd
Other
2,735
0.85
0.018182
0
react-lib
900
2024-12-05T01:13:54.742145
GPL-3.0
false
756e8687d17fc1438f500e8df8859698
"""Reader for WebSocket protocol versions 13 and 8."""\n\nimport asyncio\nimport builtins\nfrom collections import deque\nfrom typing import Deque, Final, Optional, Set, Tuple, Union\n\nfrom ..base_protocol import BaseProtocol\nfrom ..compression_utils import ZLibDecompressor\nfrom ..helpers import _EXC_SENTINEL, set_exception\nfrom ..streams import EofStream\nfrom .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask\nfrom .models import (\n WS_DEFLATE_TRAILING,\n WebSocketError,\n WSCloseCode,\n WSMessage,\n WSMsgType,\n)\n\nALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}\n\n# States for the reader, used to parse the WebSocket frame\n# integer values are used so they can be cythonized\nREAD_HEADER = 1\nREAD_PAYLOAD_LENGTH = 2\nREAD_PAYLOAD_MASK = 3\nREAD_PAYLOAD = 4\n\nWS_MSG_TYPE_BINARY = WSMsgType.BINARY\nWS_MSG_TYPE_TEXT = WSMsgType.TEXT\n\n# WSMsgType values unpacked so they can by cythonized to ints\nOP_CODE_NOT_SET = -1\nOP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value\nOP_CODE_TEXT = WSMsgType.TEXT.value\nOP_CODE_BINARY = WSMsgType.BINARY.value\nOP_CODE_CLOSE = WSMsgType.CLOSE.value\nOP_CODE_PING = WSMsgType.PING.value\nOP_CODE_PONG = WSMsgType.PONG.value\n\nEMPTY_FRAME_ERROR = (True, b"")\nEMPTY_FRAME = (False, b"")\n\nCOMPRESSED_NOT_SET = -1\nCOMPRESSED_FALSE = 0\nCOMPRESSED_TRUE = 1\n\nTUPLE_NEW = tuple.__new__\n\ncython_int = int # Typed to int in Python, but cython with use a signed int in the pxd\n\n\nclass WebSocketDataQueue:\n """WebSocketDataQueue resumes and pauses an underlying stream.\n\n It is a destination for WebSocket data.\n """\n\n def __init__(\n self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop\n ) -> None:\n self._size = 0\n self._protocol = protocol\n self._limit = limit * 2\n self._loop = loop\n self._eof = False\n self._waiter: Optional[asyncio.Future[None]] = None\n self._exception: Union[BaseException, None] = None\n self._buffer: Deque[Tuple[WSMessage, int]] = deque()\n 
self._get_buffer = self._buffer.popleft\n self._put_buffer = self._buffer.append\n\n def is_eof(self) -> bool:\n return self._eof\n\n def exception(self) -> Optional[BaseException]:\n return self._exception\n\n def set_exception(\n self,\n exc: BaseException,\n exc_cause: builtins.BaseException = _EXC_SENTINEL,\n ) -> None:\n self._eof = True\n self._exception = exc\n if (waiter := self._waiter) is not None:\n self._waiter = None\n set_exception(waiter, exc, exc_cause)\n\n def _release_waiter(self) -> None:\n if (waiter := self._waiter) is None:\n return\n self._waiter = None\n if not waiter.done():\n waiter.set_result(None)\n\n def feed_eof(self) -> None:\n self._eof = True\n self._release_waiter()\n self._exception = None # Break cyclic references\n\n def feed_data(self, data: "WSMessage", size: "cython_int") -> None:\n self._size += size\n self._put_buffer((data, size))\n self._release_waiter()\n if self._size > self._limit and not self._protocol._reading_paused:\n self._protocol.pause_reading()\n\n async def read(self) -> WSMessage:\n if not self._buffer and not self._eof:\n assert not self._waiter\n self._waiter = self._loop.create_future()\n try:\n await self._waiter\n except (asyncio.CancelledError, asyncio.TimeoutError):\n self._waiter = None\n raise\n return self._read_from_buffer()\n\n def _read_from_buffer(self) -> WSMessage:\n if self._buffer:\n data, size = self._get_buffer()\n self._size -= size\n if self._size < self._limit and self._protocol._reading_paused:\n self._protocol.resume_reading()\n return data\n if self._exception is not None:\n raise self._exception\n raise EofStream\n\n\nclass WebSocketReader:\n def __init__(\n self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True\n ) -> None:\n self.queue = queue\n self._max_msg_size = max_msg_size\n\n self._exc: Optional[Exception] = None\n self._partial = bytearray()\n self._state = READ_HEADER\n\n self._opcode: int = OP_CODE_NOT_SET\n self._frame_fin = False\n 
self._frame_opcode: int = OP_CODE_NOT_SET\n self._payload_fragments: list[bytes] = []\n self._frame_payload_len = 0\n\n self._tail: bytes = b""\n self._has_mask = False\n self._frame_mask: Optional[bytes] = None\n self._payload_bytes_to_read = 0\n self._payload_len_flag = 0\n self._compressed: int = COMPRESSED_NOT_SET\n self._decompressobj: Optional[ZLibDecompressor] = None\n self._compress = compress\n\n def feed_eof(self) -> None:\n self.queue.feed_eof()\n\n # data can be bytearray on Windows because proactor event loop uses bytearray\n # and asyncio types this to Union[bytes, bytearray, memoryview] so we need\n # coerce data to bytes if it is not\n def feed_data(\n self, data: Union[bytes, bytearray, memoryview]\n ) -> Tuple[bool, bytes]:\n if type(data) is not bytes:\n data = bytes(data)\n\n if self._exc is not None:\n return True, data\n\n try:\n self._feed_data(data)\n except Exception as exc:\n self._exc = exc\n set_exception(self.queue, exc)\n return EMPTY_FRAME_ERROR\n\n return EMPTY_FRAME\n\n def _handle_frame(\n self,\n fin: bool,\n opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int\n payload: Union[bytes, bytearray],\n compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int\n ) -> None:\n msg: WSMessage\n if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}:\n # load text/binary\n if not fin:\n # got partial frame payload\n if opcode != OP_CODE_CONTINUATION:\n self._opcode = opcode\n self._partial += payload\n if self._max_msg_size and len(self._partial) >= self._max_msg_size:\n raise WebSocketError(\n WSCloseCode.MESSAGE_TOO_BIG,\n f"Message size {len(self._partial)} "\n f"exceeds limit {self._max_msg_size}",\n )\n return\n\n has_partial = bool(self._partial)\n if opcode == OP_CODE_CONTINUATION:\n if self._opcode == OP_CODE_NOT_SET:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Continuation frame for non started message",\n )\n opcode = self._opcode\n self._opcode = OP_CODE_NOT_SET\n # 
previous frame was non finished\n # we should get continuation opcode\n elif has_partial:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "The opcode in non-fin frame is expected "\n f"to be zero, got {opcode!r}",\n )\n\n assembled_payload: Union[bytes, bytearray]\n if has_partial:\n assembled_payload = self._partial + payload\n self._partial.clear()\n else:\n assembled_payload = payload\n\n if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:\n raise WebSocketError(\n WSCloseCode.MESSAGE_TOO_BIG,\n f"Message size {len(assembled_payload)} "\n f"exceeds limit {self._max_msg_size}",\n )\n\n # Decompress process must to be done after all packets\n # received.\n if compressed:\n if not self._decompressobj:\n self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)\n # XXX: It's possible that the zlib backend (isal is known to\n # do this, maybe others too?) will return max_length bytes,\n # but internally buffer more data such that the payload is\n # >max_length, so we return one extra byte and if we're able\n # to do that, then the message is too big.\n payload_merged = self._decompressobj.decompress_sync(\n assembled_payload + WS_DEFLATE_TRAILING,\n (\n self._max_msg_size + 1\n if self._max_msg_size\n else self._max_msg_size\n ),\n )\n if self._max_msg_size and len(payload_merged) > self._max_msg_size:\n raise WebSocketError(\n WSCloseCode.MESSAGE_TOO_BIG,\n f"Decompressed message exceeds size limit {self._max_msg_size}",\n )\n elif type(assembled_payload) is bytes:\n payload_merged = assembled_payload\n else:\n payload_merged = bytes(assembled_payload)\n\n if opcode == OP_CODE_TEXT:\n try:\n text = payload_merged.decode("utf-8")\n except UnicodeDecodeError as exc:\n raise WebSocketError(\n WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"\n ) from exc\n\n # XXX: The Text and Binary messages here can be a performance\n # bottleneck, so we use tuple.__new__ to improve performance.\n # This is not type safe, but many tests 
should fail in\n # test_client_ws_functional.py if this is wrong.\n self.queue.feed_data(\n TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),\n len(payload_merged),\n )\n else:\n self.queue.feed_data(\n TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),\n len(payload_merged),\n )\n elif opcode == OP_CODE_CLOSE:\n if len(payload) >= 2:\n close_code = UNPACK_CLOSE_CODE(payload[:2])[0]\n if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n f"Invalid close code: {close_code}",\n )\n try:\n close_message = payload[2:].decode("utf-8")\n except UnicodeDecodeError as exc:\n raise WebSocketError(\n WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"\n ) from exc\n msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message))\n elif payload:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n f"Invalid close frame: {fin} {opcode} {payload!r}",\n )\n else:\n msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))\n\n self.queue.feed_data(msg, 0)\n elif opcode == OP_CODE_PING:\n msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))\n self.queue.feed_data(msg, len(payload))\n elif opcode == OP_CODE_PONG:\n msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))\n self.queue.feed_data(msg, len(payload))\n else:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"\n )\n\n def _feed_data(self, data: bytes) -> None:\n """Return the next frame from the socket."""\n if self._tail:\n data, self._tail = self._tail + data, b""\n\n start_pos: int = 0\n data_len = len(data)\n data_cstr = data\n\n while True:\n # read header\n if self._state == READ_HEADER:\n if data_len - start_pos < 2:\n break\n first_byte = data_cstr[start_pos]\n second_byte = data_cstr[start_pos + 1]\n start_pos += 2\n\n fin = (first_byte >> 7) & 1\n rsv1 = (first_byte >> 6) & 1\n rsv2 = (first_byte >> 5) & 1\n rsv3 = (first_byte >> 4) & 1\n opcode = first_byte & 0xF\n\n # frame-fin = 
%x0 ; more frames of this message follow\n # / %x1 ; final frame of this message\n # frame-rsv1 = %x0 ;\n # 1 bit, MUST be 0 unless negotiated otherwise\n # frame-rsv2 = %x0 ;\n # 1 bit, MUST be 0 unless negotiated otherwise\n # frame-rsv3 = %x0 ;\n # 1 bit, MUST be 0 unless negotiated otherwise\n #\n # Remove rsv1 from this test for deflate development\n if rsv2 or rsv3 or (rsv1 and not self._compress):\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Received frame with non-zero reserved bits",\n )\n\n if opcode > 0x7 and fin == 0:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Received fragmented control frame",\n )\n\n has_mask = (second_byte >> 7) & 1\n length = second_byte & 0x7F\n\n # Control frames MUST have a payload\n # length of 125 bytes or less\n if opcode > 0x7 and length > 125:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Control frame payload cannot be larger than 125 bytes",\n )\n\n # Set compress status if last package is FIN\n # OR set compress status if this is first fragment\n # Raise error if not first fragment with rsv1 = 0x1\n if self._frame_fin or self._compressed == COMPRESSED_NOT_SET:\n self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE\n elif rsv1:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Received frame with non-zero reserved bits",\n )\n\n self._frame_fin = bool(fin)\n self._frame_opcode = opcode\n self._has_mask = bool(has_mask)\n self._payload_len_flag = length\n self._state = READ_PAYLOAD_LENGTH\n\n # read payload length\n if self._state == READ_PAYLOAD_LENGTH:\n len_flag = self._payload_len_flag\n if len_flag == 126:\n if data_len - start_pos < 2:\n break\n first_byte = data_cstr[start_pos]\n second_byte = data_cstr[start_pos + 1]\n start_pos += 2\n self._payload_bytes_to_read = first_byte << 8 | second_byte\n elif len_flag > 126:\n if data_len - start_pos < 8:\n break\n self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0]\n start_pos += 8\n else:\n 
self._payload_bytes_to_read = len_flag\n\n self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD\n\n # read payload mask\n if self._state == READ_PAYLOAD_MASK:\n if data_len - start_pos < 4:\n break\n self._frame_mask = data_cstr[start_pos : start_pos + 4]\n start_pos += 4\n self._state = READ_PAYLOAD\n\n if self._state == READ_PAYLOAD:\n chunk_len = data_len - start_pos\n if self._payload_bytes_to_read >= chunk_len:\n f_end_pos = data_len\n self._payload_bytes_to_read -= chunk_len\n else:\n f_end_pos = start_pos + self._payload_bytes_to_read\n self._payload_bytes_to_read = 0\n\n had_fragments = self._frame_payload_len\n self._frame_payload_len += f_end_pos - start_pos\n f_start_pos = start_pos\n start_pos = f_end_pos\n\n if self._payload_bytes_to_read != 0:\n # If we don't have a complete frame, we need to save the\n # data for the next call to feed_data.\n self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])\n break\n\n payload: Union[bytes, bytearray]\n if had_fragments:\n # We have to join the payload fragments get the payload\n self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])\n if self._has_mask:\n assert self._frame_mask is not None\n payload_bytearray = bytearray(b"".join(self._payload_fragments))\n websocket_mask(self._frame_mask, payload_bytearray)\n payload = payload_bytearray\n else:\n payload = b"".join(self._payload_fragments)\n self._payload_fragments.clear()\n elif self._has_mask:\n assert self._frame_mask is not None\n payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment]\n if type(payload_bytearray) is not bytearray: # pragma: no branch\n # Cython will do the conversion for us\n # but we need to do it for Python and we\n # will always get here in Python\n payload_bytearray = bytearray(payload_bytearray)\n websocket_mask(self._frame_mask, payload_bytearray)\n payload = payload_bytearray\n else:\n payload = data_cstr[f_start_pos:f_end_pos]\n\n self._handle_frame(\n self._frame_fin, 
self._frame_opcode, payload, self._compressed\n )\n self._frame_payload_len = 0\n self._state = READ_HEADER\n\n # XXX: Cython needs slices to be bounded, so we can't omit the slice end here.\n self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b""\n
.venv\Lib\site-packages\aiohttp\_websocket\reader_c.py
reader_c.py
Python
19,267
0.95
0.17437
0.110312
vue-tools
776
2024-07-15T00:05:34.299601
Apache-2.0
false
50819c8ed76980cdd80337abdf961858
"""Reader for WebSocket protocol versions 13 and 8."""\n\nimport asyncio\nimport builtins\nfrom collections import deque\nfrom typing import Deque, Final, Optional, Set, Tuple, Union\n\nfrom ..base_protocol import BaseProtocol\nfrom ..compression_utils import ZLibDecompressor\nfrom ..helpers import _EXC_SENTINEL, set_exception\nfrom ..streams import EofStream\nfrom .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask\nfrom .models import (\n WS_DEFLATE_TRAILING,\n WebSocketError,\n WSCloseCode,\n WSMessage,\n WSMsgType,\n)\n\nALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}\n\n# States for the reader, used to parse the WebSocket frame\n# integer values are used so they can be cythonized\nREAD_HEADER = 1\nREAD_PAYLOAD_LENGTH = 2\nREAD_PAYLOAD_MASK = 3\nREAD_PAYLOAD = 4\n\nWS_MSG_TYPE_BINARY = WSMsgType.BINARY\nWS_MSG_TYPE_TEXT = WSMsgType.TEXT\n\n# WSMsgType values unpacked so they can by cythonized to ints\nOP_CODE_NOT_SET = -1\nOP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value\nOP_CODE_TEXT = WSMsgType.TEXT.value\nOP_CODE_BINARY = WSMsgType.BINARY.value\nOP_CODE_CLOSE = WSMsgType.CLOSE.value\nOP_CODE_PING = WSMsgType.PING.value\nOP_CODE_PONG = WSMsgType.PONG.value\n\nEMPTY_FRAME_ERROR = (True, b"")\nEMPTY_FRAME = (False, b"")\n\nCOMPRESSED_NOT_SET = -1\nCOMPRESSED_FALSE = 0\nCOMPRESSED_TRUE = 1\n\nTUPLE_NEW = tuple.__new__\n\ncython_int = int # Typed to int in Python, but cython with use a signed int in the pxd\n\n\nclass WebSocketDataQueue:\n """WebSocketDataQueue resumes and pauses an underlying stream.\n\n It is a destination for WebSocket data.\n """\n\n def __init__(\n self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop\n ) -> None:\n self._size = 0\n self._protocol = protocol\n self._limit = limit * 2\n self._loop = loop\n self._eof = False\n self._waiter: Optional[asyncio.Future[None]] = None\n self._exception: Union[BaseException, None] = None\n self._buffer: Deque[Tuple[WSMessage, int]] = deque()\n 
self._get_buffer = self._buffer.popleft\n self._put_buffer = self._buffer.append\n\n def is_eof(self) -> bool:\n return self._eof\n\n def exception(self) -> Optional[BaseException]:\n return self._exception\n\n def set_exception(\n self,\n exc: BaseException,\n exc_cause: builtins.BaseException = _EXC_SENTINEL,\n ) -> None:\n self._eof = True\n self._exception = exc\n if (waiter := self._waiter) is not None:\n self._waiter = None\n set_exception(waiter, exc, exc_cause)\n\n def _release_waiter(self) -> None:\n if (waiter := self._waiter) is None:\n return\n self._waiter = None\n if not waiter.done():\n waiter.set_result(None)\n\n def feed_eof(self) -> None:\n self._eof = True\n self._release_waiter()\n self._exception = None # Break cyclic references\n\n def feed_data(self, data: "WSMessage", size: "cython_int") -> None:\n self._size += size\n self._put_buffer((data, size))\n self._release_waiter()\n if self._size > self._limit and not self._protocol._reading_paused:\n self._protocol.pause_reading()\n\n async def read(self) -> WSMessage:\n if not self._buffer and not self._eof:\n assert not self._waiter\n self._waiter = self._loop.create_future()\n try:\n await self._waiter\n except (asyncio.CancelledError, asyncio.TimeoutError):\n self._waiter = None\n raise\n return self._read_from_buffer()\n\n def _read_from_buffer(self) -> WSMessage:\n if self._buffer:\n data, size = self._get_buffer()\n self._size -= size\n if self._size < self._limit and self._protocol._reading_paused:\n self._protocol.resume_reading()\n return data\n if self._exception is not None:\n raise self._exception\n raise EofStream\n\n\nclass WebSocketReader:\n def __init__(\n self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True\n ) -> None:\n self.queue = queue\n self._max_msg_size = max_msg_size\n\n self._exc: Optional[Exception] = None\n self._partial = bytearray()\n self._state = READ_HEADER\n\n self._opcode: int = OP_CODE_NOT_SET\n self._frame_fin = False\n 
self._frame_opcode: int = OP_CODE_NOT_SET\n self._payload_fragments: list[bytes] = []\n self._frame_payload_len = 0\n\n self._tail: bytes = b""\n self._has_mask = False\n self._frame_mask: Optional[bytes] = None\n self._payload_bytes_to_read = 0\n self._payload_len_flag = 0\n self._compressed: int = COMPRESSED_NOT_SET\n self._decompressobj: Optional[ZLibDecompressor] = None\n self._compress = compress\n\n def feed_eof(self) -> None:\n self.queue.feed_eof()\n\n # data can be bytearray on Windows because proactor event loop uses bytearray\n # and asyncio types this to Union[bytes, bytearray, memoryview] so we need\n # coerce data to bytes if it is not\n def feed_data(\n self, data: Union[bytes, bytearray, memoryview]\n ) -> Tuple[bool, bytes]:\n if type(data) is not bytes:\n data = bytes(data)\n\n if self._exc is not None:\n return True, data\n\n try:\n self._feed_data(data)\n except Exception as exc:\n self._exc = exc\n set_exception(self.queue, exc)\n return EMPTY_FRAME_ERROR\n\n return EMPTY_FRAME\n\n def _handle_frame(\n self,\n fin: bool,\n opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int\n payload: Union[bytes, bytearray],\n compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int\n ) -> None:\n msg: WSMessage\n if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}:\n # load text/binary\n if not fin:\n # got partial frame payload\n if opcode != OP_CODE_CONTINUATION:\n self._opcode = opcode\n self._partial += payload\n if self._max_msg_size and len(self._partial) >= self._max_msg_size:\n raise WebSocketError(\n WSCloseCode.MESSAGE_TOO_BIG,\n f"Message size {len(self._partial)} "\n f"exceeds limit {self._max_msg_size}",\n )\n return\n\n has_partial = bool(self._partial)\n if opcode == OP_CODE_CONTINUATION:\n if self._opcode == OP_CODE_NOT_SET:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Continuation frame for non started message",\n )\n opcode = self._opcode\n self._opcode = OP_CODE_NOT_SET\n # 
previous frame was non finished\n # we should get continuation opcode\n elif has_partial:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "The opcode in non-fin frame is expected "\n f"to be zero, got {opcode!r}",\n )\n\n assembled_payload: Union[bytes, bytearray]\n if has_partial:\n assembled_payload = self._partial + payload\n self._partial.clear()\n else:\n assembled_payload = payload\n\n if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:\n raise WebSocketError(\n WSCloseCode.MESSAGE_TOO_BIG,\n f"Message size {len(assembled_payload)} "\n f"exceeds limit {self._max_msg_size}",\n )\n\n # Decompress process must to be done after all packets\n # received.\n if compressed:\n if not self._decompressobj:\n self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)\n # XXX: It's possible that the zlib backend (isal is known to\n # do this, maybe others too?) will return max_length bytes,\n # but internally buffer more data such that the payload is\n # >max_length, so we return one extra byte and if we're able\n # to do that, then the message is too big.\n payload_merged = self._decompressobj.decompress_sync(\n assembled_payload + WS_DEFLATE_TRAILING,\n (\n self._max_msg_size + 1\n if self._max_msg_size\n else self._max_msg_size\n ),\n )\n if self._max_msg_size and len(payload_merged) > self._max_msg_size:\n raise WebSocketError(\n WSCloseCode.MESSAGE_TOO_BIG,\n f"Decompressed message exceeds size limit {self._max_msg_size}",\n )\n elif type(assembled_payload) is bytes:\n payload_merged = assembled_payload\n else:\n payload_merged = bytes(assembled_payload)\n\n if opcode == OP_CODE_TEXT:\n try:\n text = payload_merged.decode("utf-8")\n except UnicodeDecodeError as exc:\n raise WebSocketError(\n WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"\n ) from exc\n\n # XXX: The Text and Binary messages here can be a performance\n # bottleneck, so we use tuple.__new__ to improve performance.\n # This is not type safe, but many tests 
should fail in\n # test_client_ws_functional.py if this is wrong.\n self.queue.feed_data(\n TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),\n len(payload_merged),\n )\n else:\n self.queue.feed_data(\n TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),\n len(payload_merged),\n )\n elif opcode == OP_CODE_CLOSE:\n if len(payload) >= 2:\n close_code = UNPACK_CLOSE_CODE(payload[:2])[0]\n if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n f"Invalid close code: {close_code}",\n )\n try:\n close_message = payload[2:].decode("utf-8")\n except UnicodeDecodeError as exc:\n raise WebSocketError(\n WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"\n ) from exc\n msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message))\n elif payload:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n f"Invalid close frame: {fin} {opcode} {payload!r}",\n )\n else:\n msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))\n\n self.queue.feed_data(msg, 0)\n elif opcode == OP_CODE_PING:\n msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))\n self.queue.feed_data(msg, len(payload))\n elif opcode == OP_CODE_PONG:\n msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))\n self.queue.feed_data(msg, len(payload))\n else:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"\n )\n\n def _feed_data(self, data: bytes) -> None:\n """Return the next frame from the socket."""\n if self._tail:\n data, self._tail = self._tail + data, b""\n\n start_pos: int = 0\n data_len = len(data)\n data_cstr = data\n\n while True:\n # read header\n if self._state == READ_HEADER:\n if data_len - start_pos < 2:\n break\n first_byte = data_cstr[start_pos]\n second_byte = data_cstr[start_pos + 1]\n start_pos += 2\n\n fin = (first_byte >> 7) & 1\n rsv1 = (first_byte >> 6) & 1\n rsv2 = (first_byte >> 5) & 1\n rsv3 = (first_byte >> 4) & 1\n opcode = first_byte & 0xF\n\n # frame-fin = 
%x0 ; more frames of this message follow\n # / %x1 ; final frame of this message\n # frame-rsv1 = %x0 ;\n # 1 bit, MUST be 0 unless negotiated otherwise\n # frame-rsv2 = %x0 ;\n # 1 bit, MUST be 0 unless negotiated otherwise\n # frame-rsv3 = %x0 ;\n # 1 bit, MUST be 0 unless negotiated otherwise\n #\n # Remove rsv1 from this test for deflate development\n if rsv2 or rsv3 or (rsv1 and not self._compress):\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Received frame with non-zero reserved bits",\n )\n\n if opcode > 0x7 and fin == 0:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Received fragmented control frame",\n )\n\n has_mask = (second_byte >> 7) & 1\n length = second_byte & 0x7F\n\n # Control frames MUST have a payload\n # length of 125 bytes or less\n if opcode > 0x7 and length > 125:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Control frame payload cannot be larger than 125 bytes",\n )\n\n # Set compress status if last package is FIN\n # OR set compress status if this is first fragment\n # Raise error if not first fragment with rsv1 = 0x1\n if self._frame_fin or self._compressed == COMPRESSED_NOT_SET:\n self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE\n elif rsv1:\n raise WebSocketError(\n WSCloseCode.PROTOCOL_ERROR,\n "Received frame with non-zero reserved bits",\n )\n\n self._frame_fin = bool(fin)\n self._frame_opcode = opcode\n self._has_mask = bool(has_mask)\n self._payload_len_flag = length\n self._state = READ_PAYLOAD_LENGTH\n\n # read payload length\n if self._state == READ_PAYLOAD_LENGTH:\n len_flag = self._payload_len_flag\n if len_flag == 126:\n if data_len - start_pos < 2:\n break\n first_byte = data_cstr[start_pos]\n second_byte = data_cstr[start_pos + 1]\n start_pos += 2\n self._payload_bytes_to_read = first_byte << 8 | second_byte\n elif len_flag > 126:\n if data_len - start_pos < 8:\n break\n self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0]\n start_pos += 8\n else:\n 
self._payload_bytes_to_read = len_flag\n\n self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD\n\n # read payload mask\n if self._state == READ_PAYLOAD_MASK:\n if data_len - start_pos < 4:\n break\n self._frame_mask = data_cstr[start_pos : start_pos + 4]\n start_pos += 4\n self._state = READ_PAYLOAD\n\n if self._state == READ_PAYLOAD:\n chunk_len = data_len - start_pos\n if self._payload_bytes_to_read >= chunk_len:\n f_end_pos = data_len\n self._payload_bytes_to_read -= chunk_len\n else:\n f_end_pos = start_pos + self._payload_bytes_to_read\n self._payload_bytes_to_read = 0\n\n had_fragments = self._frame_payload_len\n self._frame_payload_len += f_end_pos - start_pos\n f_start_pos = start_pos\n start_pos = f_end_pos\n\n if self._payload_bytes_to_read != 0:\n # If we don't have a complete frame, we need to save the\n # data for the next call to feed_data.\n self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])\n break\n\n payload: Union[bytes, bytearray]\n if had_fragments:\n # We have to join the payload fragments get the payload\n self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])\n if self._has_mask:\n assert self._frame_mask is not None\n payload_bytearray = bytearray(b"".join(self._payload_fragments))\n websocket_mask(self._frame_mask, payload_bytearray)\n payload = payload_bytearray\n else:\n payload = b"".join(self._payload_fragments)\n self._payload_fragments.clear()\n elif self._has_mask:\n assert self._frame_mask is not None\n payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment]\n if type(payload_bytearray) is not bytearray: # pragma: no branch\n # Cython will do the conversion for us\n # but we need to do it for Python and we\n # will always get here in Python\n payload_bytearray = bytearray(payload_bytearray)\n websocket_mask(self._frame_mask, payload_bytearray)\n payload = payload_bytearray\n else:\n payload = data_cstr[f_start_pos:f_end_pos]\n\n self._handle_frame(\n self._frame_fin, 
self._frame_opcode, payload, self._compressed\n )\n self._frame_payload_len = 0\n self._state = READ_HEADER\n\n # XXX: Cython needs slices to be bounded, so we can't omit the slice end here.\n self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b""\n
.venv\Lib\site-packages\aiohttp\_websocket\reader_py.py
reader_py.py
Python
19,267
0.95
0.17437
0.110312
node-utils
452
2024-10-11T13:19:43.160841
BSD-3-Clause
false
50819c8ed76980cdd80337abdf961858
"""WebSocket protocol versions 13 and 8."""\n\nimport asyncio\nimport random\nfrom functools import partial\nfrom typing import Any, Final, Optional, Union\n\nfrom ..base_protocol import BaseProtocol\nfrom ..client_exceptions import ClientConnectionResetError\nfrom ..compression_utils import ZLibBackend, ZLibCompressor\nfrom .helpers import (\n MASK_LEN,\n MSG_SIZE,\n PACK_CLOSE_CODE,\n PACK_LEN1,\n PACK_LEN2,\n PACK_LEN3,\n PACK_RANDBITS,\n websocket_mask,\n)\nfrom .models import WS_DEFLATE_TRAILING, WSMsgType\n\nDEFAULT_LIMIT: Final[int] = 2**16\n\n# For websockets, keeping latency low is extremely important as implementations\n# generally expect to be able to send and receive messages quickly. We use a\n# larger chunk size than the default to reduce the number of executor calls\n# since the executor is a significant source of latency and overhead when\n# the chunks are small. A size of 5KiB was chosen because it is also the\n# same value python-zlib-ng choose to use as the threshold to release the GIL.\n\nWEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024\n\n\nclass WebSocketWriter:\n """WebSocket writer.\n\n The writer is responsible for sending messages to the client. It is\n created by the protocol when a connection is established. 
The writer\n should avoid implementing any application logic and should only be\n concerned with the low-level details of the WebSocket protocol.\n """\n\n def __init__(\n self,\n protocol: BaseProtocol,\n transport: asyncio.Transport,\n *,\n use_mask: bool = False,\n limit: int = DEFAULT_LIMIT,\n random: random.Random = random.Random(),\n compress: int = 0,\n notakeover: bool = False,\n ) -> None:\n """Initialize a WebSocket writer."""\n self.protocol = protocol\n self.transport = transport\n self.use_mask = use_mask\n self.get_random_bits = partial(random.getrandbits, 32)\n self.compress = compress\n self.notakeover = notakeover\n self._closing = False\n self._limit = limit\n self._output_size = 0\n self._compressobj: Any = None # actually compressobj\n\n async def send_frame(\n self, message: bytes, opcode: int, compress: Optional[int] = None\n ) -> None:\n """Send a frame over the websocket with message as its payload."""\n if self._closing and not (opcode & WSMsgType.CLOSE):\n raise ClientConnectionResetError("Cannot write to closing transport")\n\n # RSV are the reserved bits in the frame header. 
They are used to\n # indicate that the frame is using an extension.\n # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2\n rsv = 0\n # Only compress larger packets (disabled)\n # Does small packet needs to be compressed?\n # if self.compress and opcode < 8 and len(message) > 124:\n if (compress or self.compress) and opcode < 8:\n # RSV1 (rsv = 0x40) is set for compressed frames\n # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1\n rsv = 0x40\n\n if compress:\n # Do not set self._compress if compressing is for this frame\n compressobj = self._make_compress_obj(compress)\n else: # self.compress\n if not self._compressobj:\n self._compressobj = self._make_compress_obj(self.compress)\n compressobj = self._compressobj\n\n message = (\n await compressobj.compress(message)\n + compressobj.flush(\n ZLibBackend.Z_FULL_FLUSH\n if self.notakeover\n else ZLibBackend.Z_SYNC_FLUSH\n )\n ).removesuffix(WS_DEFLATE_TRAILING)\n # Its critical that we do not return control to the event\n # loop until we have finished sending all the compressed\n # data. 
Otherwise we could end up mixing compressed frames\n # if there are multiple coroutines compressing data.\n\n msg_length = len(message)\n\n use_mask = self.use_mask\n mask_bit = 0x80 if use_mask else 0\n\n # Depending on the message length, the header is assembled differently.\n # The first byte is reserved for the opcode and the RSV bits.\n first_byte = 0x80 | rsv | opcode\n if msg_length < 126:\n header = PACK_LEN1(first_byte, msg_length | mask_bit)\n header_len = 2\n elif msg_length < 65536:\n header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length)\n header_len = 4\n else:\n header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length)\n header_len = 10\n\n if self.transport.is_closing():\n raise ClientConnectionResetError("Cannot write to closing transport")\n\n # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3\n # If we are using a mask, we need to generate it randomly\n # and apply it to the message before sending it. A mask is\n # a 32-bit value that is applied to the message using a\n # bitwise XOR operation. It is used to prevent certain types\n # of attacks on the websocket protocol. The mask is only used\n # when aiohttp is acting as a client. Servers do not use a mask.\n if use_mask:\n mask = PACK_RANDBITS(self.get_random_bits())\n message = bytearray(message)\n websocket_mask(mask, message)\n self.transport.write(header + mask + message)\n self._output_size += MASK_LEN\n elif msg_length > MSG_SIZE:\n self.transport.write(header)\n self.transport.write(message)\n else:\n self.transport.write(header + message)\n\n self._output_size += header_len + msg_length\n\n # It is safe to return control to the event loop when using compression\n # after this point as we have already sent or buffered all the data.\n\n # Once we have written output_size up to the limit, we call the\n # drain helper which waits for the transport to be ready to accept\n # more data. This is a flow control mechanism to prevent the buffer\n # from growing too large. 
The drain helper will return right away\n # if the writer is not paused.\n if self._output_size > self._limit:\n self._output_size = 0\n if self.protocol._paused:\n await self.protocol._drain_helper()\n\n def _make_compress_obj(self, compress: int) -> ZLibCompressor:\n return ZLibCompressor(\n level=ZLibBackend.Z_BEST_SPEED,\n wbits=-compress,\n max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,\n )\n\n async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:\n """Close the websocket, sending the specified code and message."""\n if isinstance(message, str):\n message = message.encode("utf-8")\n try:\n await self.send_frame(\n PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE\n )\n finally:\n self._closing = True\n
.venv\Lib\site-packages\aiohttp\_websocket\writer.py
writer.py
Python
7,317
0.95
0.151685
0.232258
python-kit
614
2024-10-24T11:23:34.339560
Apache-2.0
false
f0b1a3d25d654b888e78f778417bdabc
"""WebSocket protocol versions 13 and 8."""\n
.venv\Lib\site-packages\aiohttp\_websocket\__init__.py
__init__.py
Python
45
0.5
0
0
awesome-app
746
2025-06-13T02:45:14.047298
GPL-3.0
false
f64890f7e1b7aa51cb1cca43855dd39d
e354dd499be171b6125bf56bc3b6c5e2bff2a28af69e3b5d699ddb9af2bafa3c
.venv\Lib\site-packages\aiohttp\_websocket\.hash\mask.pxd.hash
mask.pxd.hash
Other
64
0.5
0.1
0
node-utils
78
2023-08-04T07:57:13.959530
Apache-2.0
false
9bc4f45586a5def63d78f4e58ab17397
468edd38ebf8dc7000a8d333df1c82035d69a5c9febc0448be3c9c4ad4c4630c
.venv\Lib\site-packages\aiohttp\_websocket\.hash\mask.pyx.hash
mask.pyx.hash
Other
64
0.5
0.1
0
awesome-app
731
2025-06-06T14:36:00.703337
GPL-3.0
false
cfbebf42ce854abe48209334f207f7f6
1cd3a5e20456b4d04d11835b2bd3c639f14443052a2467b105b0ca07fdb4b25d
.venv\Lib\site-packages\aiohttp\_websocket\.hash\reader_c.pxd.hash
reader_c.pxd.hash
Other
64
0.5
0.1
0
awesome-app
683
2025-04-30T13:06:45.807466
GPL-3.0
false
ed961ae364c59864ea1f936010cd21f1
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\helpers.cpython-313.pyc
helpers.cpython-313.pyc
Other
6,158
0.95
0.066667
0
awesome-app
431
2024-06-03T02:14:26.176881
GPL-3.0
false
c2596f244820241dce193291cfe89707
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\models.cpython-313.pyc
models.cpython-313.pyc
Other
3,623
0.8
0.021277
0
react-lib
919
2025-05-12T19:22:12.295396
GPL-3.0
false
cc27b8c6954552c78e40fa6c253ebcf9
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\reader.cpython-313.pyc
reader.cpython-313.pyc
Other
763
0.8
0.071429
0
vue-tools
835
2025-06-18T21:24:10.203284
MIT
false
55e085ef2f39a254188f7e6d181f7475
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\reader_c.cpython-313.pyc
reader_c.cpython-313.pyc
Other
18,118
0.8
0.016393
0.011429
vue-tools
375
2023-12-11T23:27:37.553207
GPL-3.0
false
f968575583c0fbe0b0de7047cd746bdd
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\reader_py.cpython-313.pyc
reader_py.cpython-313.pyc
Other
18,119
0.8
0.016393
0.011429
awesome-app
661
2025-04-30T20:29:46.829803
MIT
false
054ebcbe70a20f5be07b144cae9eb036
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\writer.cpython-313.pyc
writer.cpython-313.pyc
Other
6,543
0.8
0.012048
0
node-utils
807
2025-04-15T08:52:24.412012
Apache-2.0
false
93a4d0d916e40b5180019af1604f3ad7
\n\n
.venv\Lib\site-packages\aiohttp\_websocket\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
245
0.7
0
0
awesome-app
836
2024-09-02T04:41:43.934789
MIT
false
a7f024a0cc3d6e8420f11ccf3c5e0a56
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\abc.cpython-313.pyc
abc.cpython-313.pyc
Other
12,728
0.95
0.121495
0
node-utils
691
2024-08-04T05:08:25.862639
GPL-3.0
false
295d1fccb0019ac3b936b549bf656c2a
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\base_protocol.cpython-313.pyc
base_protocol.cpython-313.pyc
Other
4,718
0.8
0.025
0
vue-tools
811
2024-12-23T15:55:35.881297
BSD-3-Clause
false
e2d34550e81f31e185f13bd0107f007c
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client.cpython-313.pyc
client.cpython-313.pyc
Other
56,003
0.75
0.030303
0.006993
python-kit
66
2024-03-28T13:26:29.667055
BSD-3-Clause
false
4d44fbe32f2246b3fc0c4eff7edd53d9
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client_exceptions.cpython-313.pyc
client_exceptions.cpython-313.pyc
Other
18,621
0.95
0.120482
0.013072
awesome-app
393
2025-05-05T19:29:51.059021
BSD-3-Clause
false
6867b9e23debd11dd7bb136ae6a6ff2f
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client_middlewares.cpython-313.pyc
client_middlewares.cpython-313.pyc
Other
2,273
0.8
0
0
awesome-app
52
2023-10-09T07:59:32.985011
Apache-2.0
false
64083090d7c2a068952aaacae954d96a
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client_middleware_digest_auth.cpython-313.pyc
client_middleware_digest_auth.cpython-313.pyc
Other
16,406
0.95
0.069767
0
vue-tools
747
2023-10-08T05:10:49.704972
MIT
false
306ae91a3f171a5557e7ad2402161da5
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client_proto.cpython-313.pyc
client_proto.cpython-313.pyc
Other
13,754
0.95
0.033898
0.018018
react-lib
352
2024-10-19T10:03:18.066846
BSD-3-Clause
false
6036e609d2e2610196b484e2433da242
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client_reqrep.cpython-313.pyc
client_reqrep.cpython-313.pyc
Other
65,580
0.75
0.048689
0.018634
python-kit
168
2024-04-25T02:58:23.610249
Apache-2.0
false
502c2dccc9181d3763b59c4f3184f448
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\client_ws.cpython-313.pyc
client_ws.cpython-313.pyc
Other
22,908
0.8
0.012739
0.026316
python-kit
539
2023-10-11T13:16:11.942571
MIT
false
fff57e23e6f1a982d44a83a79f9e6b7c
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\compression_utils.cpython-313.pyc
compression_utils.cpython-313.pyc
Other
15,526
0.8
0
0.008929
node-utils
159
2023-08-26T05:59:08.812784
MIT
false
4ca037e9f49ebdfb87795d900606f948
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\connector.cpython-313.pyc
connector.cpython-313.pyc
Other
76,621
0.75
0.048246
0.014196
vue-tools
567
2025-05-24T07:18:50.304829
GPL-3.0
false
bab294e97071ee6e7fccaee2eeee7d9d
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\cookiejar.cpython-313.pyc
cookiejar.cpython-313.pyc
Other
22,098
0.95
0.01005
0.015873
python-kit
446
2024-03-30T11:47:30.127819
MIT
false
56450bd086064827c761585b51e79dac
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\formdata.cpython-313.pyc
formdata.cpython-313.pyc
Other
7,871
0.95
0.038961
0
node-utils
883
2023-09-13T02:43:43.702554
GPL-3.0
false
d35ac7a9dfe36be3e1bd6dbdcb8b1cb3
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\hdrs.cpython-313.pyc
hdrs.cpython-313.pyc
Other
8,475
0.8
0
0
node-utils
190
2024-08-04T06:51:09.834413
GPL-3.0
false
e8454f99e1a7e37eda3f5246787cf39d
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\helpers.cpython-313.pyc
helpers.cpython-313.pyc
Other
42,565
0.95
0.068293
0.015831
react-lib
978
2023-12-01T21:41:28.295042
Apache-2.0
false
382e1302ba280b8fd067a1dbaed6ca38
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\http.cpython-313.pyc
http.cpython-313.pyc
Other
1,692
0.7
0
0
vue-tools
8
2025-05-10T07:07:54.054498
MIT
false
6ff75ac8dd570d448e05c773c7a39b03
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\http_exceptions.cpython-313.pyc
http_exceptions.cpython-313.pyc
Other
6,534
0.95
0.042857
0
vue-tools
701
2024-01-23T05:20:40.219971
MIT
false
003cdf7b661a4853ef637797bc3232e3
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\http_parser.cpython-313.pyc
http_parser.cpython-313.pyc
Other
36,657
0.95
0.002551
0.002667
react-lib
209
2024-11-06T22:06:32.400265
BSD-3-Clause
false
8d11e988918395f46dea87a5aa42ac2e
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\http_websocket.cpython-313.pyc
http_websocket.cpython-313.pyc
Other
973
0.7
0
0
react-lib
841
2025-01-27T04:51:21.010399
MIT
false
c2551530f809f95c53bd96eda9cada83
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\http_writer.cpython-313.pyc
http_writer.cpython-313.pyc
Other
15,065
0.8
0.008065
0
vue-tools
94
2023-08-28T08:47:25.132899
GPL-3.0
false
05b8d253f4b354656803bf25e56b2206
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\log.cpython-313.pyc
log.cpython-313.pyc
Other
710
0.8
0
0
awesome-app
774
2024-12-08T01:49:15.330381
BSD-3-Clause
false
c8abf1c28c48744727378955bad109c1
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\multipart.cpython-313.pyc
multipart.cpython-313.pyc
Other
53,887
0.95
0.02799
0.010959
react-lib
889
2024-12-01T20:18:24.736075
BSD-3-Clause
false
d7f253e20c26962aa3e681347c758848
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\payload.cpython-313.pyc
payload.cpython-313.pyc
Other
47,639
0.95
0.064275
0.001739
awesome-app
8
2025-02-24T20:47:34.090362
Apache-2.0
false
576029462e59e47e7288d845f1ed70d8
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\payload_streamer.cpython-313.pyc
payload_streamer.cpython-313.pyc
Other
4,576
0.95
0.053571
0
react-lib
98
2023-08-04T04:14:37.255079
BSD-3-Clause
false
9e0b494a197340ac86b0111947f74279
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\pytest_plugin.cpython-313.pyc
pytest_plugin.cpython-313.pyc
Other
18,814
0.95
0.017937
0
python-kit
211
2024-03-14T22:14:25.770168
Apache-2.0
true
035e833faa56a35df0617eff250f53f8
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\resolver.cpython-313.pyc
resolver.cpython-313.pyc
Other
11,114
0.95
0.052174
0
node-utils
525
2023-07-24T06:54:56.099193
GPL-3.0
false
8ff9688cb979a53f2b4d15fdf8f53fb9
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\streams.cpython-313.pyc
streams.cpython-313.pyc
Other
32,065
0.95
0.047794
0.007813
python-kit
128
2025-01-29T02:34:27.551229
MIT
false
36ced0e89838675218bdf2080ae76321
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\tcp_helpers.cpython-313.pyc
tcp_helpers.cpython-313.pyc
Other
1,805
0.8
0
0.04
python-kit
557
2023-07-19T00:19:41.660056
BSD-3-Clause
false
7a967d89b9ba9c1cd41f5934704d5cac
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\test_utils.cpython-313.pyc
test_utils.cpython-313.pyc
Other
34,853
0.95
0.017327
0.019231
react-lib
764
2024-02-01T01:07:43.805192
Apache-2.0
true
8d795c8b72fdb00e3247d05cd8ae8c7f
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\tracing.cpython-313.pyc
tracing.cpython-313.pyc
Other
22,957
0.95
0.006536
0.014085
node-utils
130
2024-03-19T09:52:10.684493
MIT
false
5985923db0e5711c7dc58961ad6bab5e
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\typedefs.cpython-313.pyc
typedefs.cpython-313.pyc
Other
2,344
0.8
0
0
python-kit
600
2023-09-15T20:33:28.512894
Apache-2.0
false
544bfb2c3536e37dfdc5525a973dd374
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web.cpython-313.pyc
web.cpython-313.pyc
Other
16,644
0.95
0.013699
0
vue-tools
149
2024-07-23T16:35:34.944996
GPL-3.0
false
2b18c87c0eb363f3f6a1b652059cd25a
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_app.cpython-313.pyc
web_app.cpython-313.pyc
Other
27,681
0.95
0.009756
0.031746
node-utils
643
2024-04-11T03:15:19.534690
Apache-2.0
false
91dd2a6a6872f0f869abca4bc3a9685d
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_exceptions.cpython-313.pyc
web_exceptions.cpython-313.pyc
Other
16,235
0.95
0.092105
0
node-utils
890
2025-02-16T23:51:51.615282
Apache-2.0
false
57d69c53248dbd0175937745c8e38475
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_fileresponse.cpython-313.pyc
web_fileresponse.cpython-313.pyc
Other
17,408
0.8
0
0.006711
react-lib
920
2024-03-14T00:57:32.889798
Apache-2.0
false
83f867c1ad56a03bf49bc289ed32aada
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_log.cpython-313.pyc
web_log.cpython-313.pyc
Other
10,535
0.8
0.027397
0.007752
vue-tools
950
2024-07-15T09:03:18.864828
Apache-2.0
false
14f28fd5e442646b10deec3088d4bda9
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_middlewares.cpython-313.pyc
web_middlewares.cpython-313.pyc
Other
5,705
0.8
0.044776
0
awesome-app
98
2025-05-15T10:09:12.897535
Apache-2.0
false
938dcc56ebb0365e7695d2c6f37baf7a
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_protocol.cpython-313.pyc
web_protocol.cpython-313.pyc
Other
31,921
0.95
0.023973
0.007663
react-lib
17
2025-03-14T04:03:22.812236
MIT
false
d78df51c1444e1cc8fe4f8a29634645e
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_request.cpython-313.pyc
web_request.cpython-313.pyc
Other
38,600
0.8
0.033951
0.003597
react-lib
923
2023-11-29T09:02:57.084125
GPL-3.0
false
b8b2686c72451c5d789538d064df62f8
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_response.cpython-313.pyc
web_response.cpython-313.pyc
Other
40,028
0.95
0.014205
0.021944
react-lib
39
2024-04-17T13:46:10.153781
GPL-3.0
false
01c8abaf157fe364879d8ae9bd191bc9
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_routedef.cpython-313.pyc
web_routedef.cpython-313.pyc
Other
11,985
0.8
0
0.025974
vue-tools
816
2024-07-25T19:39:08.603902
BSD-3-Clause
false
8523f2a62c0d6dab8cd07586924dda8a
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_runner.cpython-313.pyc
web_runner.cpython-313.pyc
Other
19,566
0.8
0
0.009434
react-lib
65
2024-07-16T10:56:15.753593
Apache-2.0
false
5753643f622297293dbac8421631492a
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_server.cpython-313.pyc
web_server.cpython-313.pyc
Other
5,054
0.8
0
0
awesome-app
789
2025-03-20T05:19:48.281239
MIT
false
8036ac1d84fd6eb20dc82affd4622917
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_urldispatcher.cpython-313.pyc
web_urldispatcher.cpython-313.pyc
Other
70,260
0.75
0.039924
0.014028
vue-tools
354
2025-03-10T18:02:32.866078
MIT
false
301b55b4cd78b66dab2ed8a6b6ce4feb
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\web_ws.cpython-313.pyc
web_ws.cpython-313.pyc
Other
32,041
0.8
0.010256
0.005236
awesome-app
324
2023-10-13T20:40:27.829713
GPL-3.0
false
69a7ca36cbb348d9ad470bb5e85846e7
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\worker.cpython-313.pyc
worker.cpython-313.pyc
Other
12,389
0.95
0.04902
0
react-lib
933
2024-05-31T14:35:27.381952
Apache-2.0
false
69b79e5458ad86e9de6c97eb704abe10
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\_cookie_helpers.cpython-313.pyc
_cookie_helpers.cpython-313.pyc
Other
10,065
0.95
0.105634
0.048387
awesome-app
652
2024-01-12T18:28:53.058957
GPL-3.0
false
7f4363e0bde263be4ad602c58d5106aa
\n\n
.venv\Lib\site-packages\aiohttp\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
5,343
0.8
0
0
react-lib
756
2023-11-04T12:29:18.628667
BSD-3-Clause
false
c4c2103d0c43ff3381ff158ee03ab7bc
pip\n
.venv\Lib\site-packages\aiohttp-3.12.13.dist-info\INSTALLER
INSTALLER
Other
4
0.5
0
0
awesome-app
254
2024-09-17T00:28:21.393752
Apache-2.0
false
365c9bfeb7d89244f2ce01c1de44cb85
Metadata-Version: 2.4\nName: aiohttp\nVersion: 3.12.13\nSummary: Async http client/server framework (asyncio)\nHome-page: https://github.com/aio-libs/aiohttp\nMaintainer: aiohttp team <team@aiohttp.org>\nMaintainer-email: team@aiohttp.org\nLicense: Apache-2.0\nProject-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org\nProject-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org\nProject-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI\nProject-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp\nProject-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html\nProject-URL: Docs: RTD, https://docs.aiohttp.org\nProject-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues\nProject-URL: GitHub: repo, https://github.com/aio-libs/aiohttp\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Framework :: AsyncIO\nClassifier: Intended Audience :: Developers\nClassifier: Operating System :: POSIX\nClassifier: Operating System :: MacOS :: MacOS X\nClassifier: Operating System :: Microsoft :: Windows\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Programming Language :: Python :: 3.13\nClassifier: Topic :: Internet :: WWW/HTTP\nRequires-Python: >=3.9\nDescription-Content-Type: text/x-rst\nLicense-File: LICENSE.txt\nRequires-Dist: aiohappyeyeballs>=2.5.0\nRequires-Dist: aiosignal>=1.1.2\nRequires-Dist: async-timeout<6.0,>=4.0; python_version < "3.11"\nRequires-Dist: attrs>=17.3.0\nRequires-Dist: frozenlist>=1.1.1\nRequires-Dist: multidict<7.0,>=4.5\nRequires-Dist: propcache>=0.2.0\nRequires-Dist: yarl<2.0,>=1.17.0\nProvides-Extra: speedups\nRequires-Dist: aiodns>=3.3.0; extra == 
"speedups"\nRequires-Dist: Brotli; platform_python_implementation == "CPython" and extra == "speedups"\nRequires-Dist: brotlicffi; platform_python_implementation != "CPython" and extra == "speedups"\nDynamic: license-file\n\n==================================\nAsync http client/server framework\n==================================\n\n.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg\n :height: 64px\n :width: 64px\n :alt: aiohttp logo\n\n|\n\n.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg\n :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI\n :alt: GitHub Actions status for master branch\n\n.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg\n :target: https://codecov.io/gh/aio-libs/aiohttp\n :alt: codecov.io status for master branch\n\n.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json\n :target: https://codspeed.io/aio-libs/aiohttp\n :alt: Codspeed.io status for aiohttp\n\n.. image:: https://badge.fury.io/py/aiohttp.svg\n :target: https://pypi.org/project/aiohttp\n :alt: Latest PyPI package version\n\n.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest\n :target: https://docs.aiohttp.org/\n :alt: Latest Read The Docs\n\n.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat\n :target: https://matrix.to/#/%23aio-libs:matrix.org\n :alt: Matrix Room — #aio-libs:matrix.org\n\n.. 
image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat\n :target: https://matrix.to/#/%23aio-libs-space:matrix.org\n :alt: Matrix Space — #aio-libs-space:matrix.org\n\n\nKey Features\n============\n\n- Supports both client and server side of HTTP protocol.\n- Supports both client and server Web-Sockets out-of-the-box and avoids\n Callback Hell.\n- Provides Web-server with middleware and pluggable routing.\n\n\nGetting started\n===============\n\nClient\n------\n\nTo get something from the web:\n\n.. code-block:: python\n\n import aiohttp\n import asyncio\n\n async def main():\n\n async with aiohttp.ClientSession() as session:\n async with session.get('http://python.org') as response:\n\n print("Status:", response.status)\n print("Content-type:", response.headers['content-type'])\n\n html = await response.text()\n print("Body:", html[:15], "...")\n\n asyncio.run(main())\n\nThis prints:\n\n.. code-block::\n\n Status: 200\n Content-type: text/html; charset=utf-8\n Body: <!doctype html> ...\n\nComing from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.\n\nServer\n------\n\nAn example using a simple server:\n\n.. 
code-block:: python\n\n # examples/server_simple.py\n from aiohttp import web\n\n async def handle(request):\n name = request.match_info.get('name', "Anonymous")\n text = "Hello, " + name\n return web.Response(text=text)\n\n async def wshandle(request):\n ws = web.WebSocketResponse()\n await ws.prepare(request)\n\n async for msg in ws:\n if msg.type == web.WSMsgType.text:\n await ws.send_str("Hello, {}".format(msg.data))\n elif msg.type == web.WSMsgType.binary:\n await ws.send_bytes(msg.data)\n elif msg.type == web.WSMsgType.close:\n break\n\n return ws\n\n\n app = web.Application()\n app.add_routes([web.get('/', handle),\n web.get('/echo', wshandle),\n web.get('/{name}', handle)])\n\n if __name__ == '__main__':\n web.run_app(app)\n\n\nDocumentation\n=============\n\nhttps://aiohttp.readthedocs.io/\n\n\nDemos\n=====\n\nhttps://github.com/aio-libs/aiohttp-demos\n\n\nExternal links\n==============\n\n* `Third party libraries\n <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_\n* `Built with aiohttp\n <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_\n* `Powered by aiohttp\n <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_\n\nFeel free to make a Pull Request for adding your link to these pages!\n\n\nCommunication channels\n======================\n\n*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions\n\n*Matrix*: `#aio-libs:matrix.org <https://matrix.to/#/#aio-libs:matrix.org>`_\n\nWe support `Stack Overflow\n<https://stackoverflow.com/questions/tagged/aiohttp>`_.\nPlease add *aiohttp* tag to your question there.\n\nRequirements\n============\n\n- attrs_\n- multidict_\n- yarl_\n- frozenlist_\n\nOptionally you may install the aiodns_ library (highly recommended for sake of speed).\n\n.. _aiodns: https://pypi.python.org/pypi/aiodns\n.. _attrs: https://github.com/python-attrs/attrs\n.. _multidict: https://pypi.python.org/pypi/multidict\n.. _frozenlist: https://pypi.org/project/frozenlist/\n.. 
_yarl: https://pypi.python.org/pypi/yarl\n.. _async-timeout: https://pypi.python.org/pypi/async_timeout\n\nLicense\n=======\n\n``aiohttp`` is offered under the Apache 2 license.\n\n\nKeepsafe\n========\n\nThe aiohttp community would like to thank Keepsafe\n(https://www.getkeepsafe.com) for its support in the early days of\nthe project.\n\n\nSource code\n===========\n\nThe latest developer version is available in a GitHub repository:\nhttps://github.com/aio-libs/aiohttp\n\nBenchmarks\n==========\n\nIf you are interested in efficiency, the AsyncIO community maintains a\nlist of benchmarks on the official wiki:\nhttps://github.com/python/asyncio/wiki/Benchmarks\n
.venv\Lib\site-packages\aiohttp-3.12.13.dist-info\METADATA
METADATA
Other
7,863
0.95
0.048
0.032967
node-utils
632
2023-11-01T03:09:06.189140
BSD-3-Clause
false
d5da614b47daa46ae393be15b45d6b1f
aiohttp-3.12.13.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\naiohttp-3.12.13.dist-info/METADATA,sha256=CEVhlDnumNzO2aS4kPv-OSYdapnViSwO1npPPrGvrnk,7863\naiohttp-3.12.13.dist-info/RECORD,,\naiohttp-3.12.13.dist-info/WHEEL,sha256=qV0EIPljj1XC_vuSatRWjn02nZIz3N1t8jsZz7HBr2U,101\naiohttp-3.12.13.dist-info/licenses/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601\naiohttp-3.12.13.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8\naiohttp/.hash/_cparser.pxd.hash,sha256=xLIAoXkmMgn1u0F7hkbDsYcG2iSP13cnGKUtPmCh1gA,64\naiohttp/.hash/_find_header.pxd.hash,sha256=W5qRPWDc55gArGZkriI5tztmQHkrdwR6NdQfRQfTxIg,64\naiohttp/.hash/_http_parser.pyx.hash,sha256=gQWpGE6DNxZWNfkY4HpUtMJIpfi7UZYmixD3cYg2Ft0,64\naiohttp/.hash/_http_writer.pyx.hash,sha256=6wl8DZynpvBFMT-qCSXDwvdFWO6u6g6YsIa4AKQg-uA,64\naiohttp/.hash/hdrs.py.hash,sha256=GldJpkmfx93VdDz-6BEe9rXA7UKQL6vnL5dnJl_h7Ug,64\naiohttp/__init__.py,sha256=tWWXpve3ehI2dilsZPZF6Ne5b-x81CpZXkSaO8j0iDs,8581\naiohttp/__pycache__/__init__.cpython-313.pyc,,\naiohttp/__pycache__/_cookie_helpers.cpython-313.pyc,,\naiohttp/__pycache__/abc.cpython-313.pyc,,\naiohttp/__pycache__/base_protocol.cpython-313.pyc,,\naiohttp/__pycache__/client.cpython-313.pyc,,\naiohttp/__pycache__/client_exceptions.cpython-313.pyc,,\naiohttp/__pycache__/client_middleware_digest_auth.cpython-313.pyc,,\naiohttp/__pycache__/client_middlewares.cpython-313.pyc,,\naiohttp/__pycache__/client_proto.cpython-313.pyc,,\naiohttp/__pycache__/client_reqrep.cpython-313.pyc,,\naiohttp/__pycache__/client_ws.cpython-313.pyc,,\naiohttp/__pycache__/compression_utils.cpython-313.pyc,,\naiohttp/__pycache__/connector.cpython-313.pyc,,\naiohttp/__pycache__/cookiejar.cpython-313.pyc,,\naiohttp/__pycache__/formdata.cpython-313.pyc,,\naiohttp/__pycache__/hdrs.cpython-313.pyc,,\naiohttp/__pycache__/helpers.cpython-313.pyc,,\naiohttp/__pycache__/http.cpython-313.pyc,,\naiohttp/__pycache__/http_exceptions.cpython-313.pyc,,\nai
ohttp/__pycache__/http_parser.cpython-313.pyc,,\naiohttp/__pycache__/http_websocket.cpython-313.pyc,,\naiohttp/__pycache__/http_writer.cpython-313.pyc,,\naiohttp/__pycache__/log.cpython-313.pyc,,\naiohttp/__pycache__/multipart.cpython-313.pyc,,\naiohttp/__pycache__/payload.cpython-313.pyc,,\naiohttp/__pycache__/payload_streamer.cpython-313.pyc,,\naiohttp/__pycache__/pytest_plugin.cpython-313.pyc,,\naiohttp/__pycache__/resolver.cpython-313.pyc,,\naiohttp/__pycache__/streams.cpython-313.pyc,,\naiohttp/__pycache__/tcp_helpers.cpython-313.pyc,,\naiohttp/__pycache__/test_utils.cpython-313.pyc,,\naiohttp/__pycache__/tracing.cpython-313.pyc,,\naiohttp/__pycache__/typedefs.cpython-313.pyc,,\naiohttp/__pycache__/web.cpython-313.pyc,,\naiohttp/__pycache__/web_app.cpython-313.pyc,,\naiohttp/__pycache__/web_exceptions.cpython-313.pyc,,\naiohttp/__pycache__/web_fileresponse.cpython-313.pyc,,\naiohttp/__pycache__/web_log.cpython-313.pyc,,\naiohttp/__pycache__/web_middlewares.cpython-313.pyc,,\naiohttp/__pycache__/web_protocol.cpython-313.pyc,,\naiohttp/__pycache__/web_request.cpython-313.pyc,,\naiohttp/__pycache__/web_response.cpython-313.pyc,,\naiohttp/__pycache__/web_routedef.cpython-313.pyc,,\naiohttp/__pycache__/web_runner.cpython-313.pyc,,\naiohttp/__pycache__/web_server.cpython-313.pyc,,\naiohttp/__pycache__/web_urldispatcher.cpython-313.pyc,,\naiohttp/__pycache__/web_ws.cpython-313.pyc,,\naiohttp/__pycache__/worker.cpython-313.pyc,,\naiohttp/_cookie_helpers.py,sha256=LR33wf6H6rmmqhdChoGW0o6mWESa3oGkFJL9_DKjoFY,12727\naiohttp/_cparser.pxd,sha256=GP0Y9NqZYQGkJtS81XDzU70e7rRMb34TR7yGMmx5_zs,4453\naiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70\naiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090\naiohttp/_http_parser.cp313-win_amd64.pyd,sha256=wMITAL6oI8DnYo3rAO1SZktcY5JcPCTyXvQi6rI3ESY,242688\naiohttp/_http_parser.pyx,sha256=dYTmzL0UcsXoaYLEYuQ0oO6kaYiKThuupZWXDB6ZdSA,29076\naiohttp/_http_writer.cp313-win_amd64.pyd,sha2
56=OxhFa-cjbwn2Xi7yMRLvQWcWQYmLPCxNb4rSEuiq5t4,46080\naiohttp/_http_writer.pyx,sha256=w60HP6TVQKmrs_nHm8FlSNYiRX0EBo7Hyq9imUmDNjo,4721\naiohttp/_websocket/.hash/mask.pxd.hash,sha256=MtKRHuamwsRzCTtELIaBcyklRCAFDonBlAPO_IRg3aY,64\naiohttp/_websocket/.hash/mask.pyx.hash,sha256=eOyT813GYbX_MUjzLOpzr-vTu3J_gpUOy8EzNgE7ntQ,64\naiohttp/_websocket/.hash/reader_c.pxd.hash,sha256=yvt0gruPh-Of05bSNwxeoYQyBSudgK1tdYTXBHa2qh8,64\naiohttp/_websocket/__init__.py,sha256=R51KWH5kkdtDLb7T-ilztksbfweKCy3t22SgxGtiY-4,45\naiohttp/_websocket/__pycache__/__init__.cpython-313.pyc,,\naiohttp/_websocket/__pycache__/helpers.cpython-313.pyc,,\naiohttp/_websocket/__pycache__/models.cpython-313.pyc,,\naiohttp/_websocket/__pycache__/reader.cpython-313.pyc,,\naiohttp/_websocket/__pycache__/reader_c.cpython-313.pyc,,\naiohttp/_websocket/__pycache__/reader_py.cpython-313.pyc,,\naiohttp/_websocket/__pycache__/writer.cpython-313.pyc,,\naiohttp/_websocket/helpers.py,sha256=amqvDhoAKAi8ptB4qUNuQhkaOn-4JxSh_VLAqytmEfw,5185\naiohttp/_websocket/mask.cp313-win_amd64.pyd,sha256=xkXMGtJ9FxbxkWnwDr36YmBs9v3YaNoKyYcHMZHDVZ8,35328\naiohttp/_websocket/mask.pxd,sha256=41TdSZvhcbYSW_Vrw7bF4r_yoor2njtdaZ3bmvK6-jw,115\naiohttp/_websocket/mask.pyx,sha256=Ro7dOOv43HAAqNMz3xyCA11ppcn-vARIvjycStTEYww,1445\naiohttp/_websocket/models.py,sha256=Pz8qvnU43VUCNZcY4g03VwTsHOsb_jSN8iG69xMAc_A,2205\naiohttp/_websocket/reader.py,sha256=1r0cJ-jdFgbSrC6-jI0zjEA1CppzoUn8u_wiebrVVO0,1061\naiohttp/_websocket/reader_c.cp313-win_amd64.pyd,sha256=BRlmXlW_5XrtSdVFtBFzmTJnCqt-4ihM53iDjePn2yg,146944\naiohttp/_websocket/reader_c.pxd,sha256=HNOl4gRWtNBNEYNbK9PGOfFEQwUqJGexBbDKB_20sl0,2735\naiohttp/_websocket/reader_c.py,sha256=aC2X9wkXxZqKCbonWdJQTE8SofT_0JGlhKjy8L2kt_A,19267\naiohttp/_websocket/reader_py.py,sha256=aC2X9wkXxZqKCbonWdJQTE8SofT_0JGlhKjy8L2kt_A,19267\naiohttp/_websocket/writer.py,sha256=Y14_nUYf01ZUkLM1F0-bpMVuVnL0pPAxlOXkzt0jmnk,7317\naiohttp/abc.py,sha256=WDsDbRPEDYGdDFgfBK6G5AbtHoFHPVjSJQGJ1hGi6J4,7416\naiohttp/base_protocol.
py,sha256=8vNIv6QV_SDCW-8tfhlyxSwiBD7dAiMTqJI1GI8RG5s,3125\naiohttp/client.py,sha256=4-krgftasFDiTXqRyb_3lcJ-_l7tlI0ke6GRwXnbSXQ,58750\naiohttp/client_exceptions.py,sha256=sJcuvYKaB2nwuSdP7k18y3wc74aU0xAzdJikzzesrPE,11788\naiohttp/client_middleware_digest_auth.py,sha256=qRiYAUnBap7Lv9rYk2EyKxIUtU92Q3-rGziXZzLuRpg,17412\naiohttp/client_middlewares.py,sha256=FEVIXFkQ58n5bhK4BGEqqDCWnDh-GNJmWq20I5Yt6SU,1973\naiohttp/client_proto.py,sha256=rfbg8nUsfpCMM_zGpQygiFn8nzSdBI-731rmXVGHwLc,12469\naiohttp/client_reqrep.py,sha256=k9sjkhnTk6B6YieZVaNlgVcsMUKt8CN44TqxKG-Cyyg,55057\naiohttp/client_ws.py,sha256=9DraHuupuJcT7NOgyeGml8SBr7V5D5ID5-piY1fQMdA,15537\naiohttp/compression_utils.py,sha256=BZ3NuQn_T8b2qQFAvqAeEIbJj09Z9cxQJ3FNYCJ-cLE,9146\naiohttp/connector.py,sha256=HhP6sG_ZDV3pMjhIKBR9QF-aBG8Bim1vNf180dLTP5I,69375\naiohttp/cookiejar.py,sha256=C2fVzQGFieFP9mFDTOvfEc6fb5kPS2ijL2tFKAUW7Sw,19444\naiohttp/formdata.py,sha256=YxvTsr1GMX0dIwoyjevGklsL9DMXbLdh5zDJAfJXJws,6589\naiohttp/hdrs.py,sha256=7htmhgZyE9HqWbPpxHU0r7kAIdT2kpOXQa1AadDh2W8,5232\naiohttp/helpers.py,sha256=zLz193DE3m68gBwsM43cdaqnzz3cdfit0Dhsd9_mXig,30572\naiohttp/http.py,sha256=DGKcwDbgIMpasv7s2jeKCRuixyj7W-RIrihRFjj0xcY,1914\naiohttp/http_exceptions.py,sha256=V6NpG-RTeEKetaZBW4OUP2-BUVgj8vvx4ueP6VpEfTs,3072\naiohttp/http_parser.py,sha256=zFpRwrvWCcogmHEzlDCnNNmrGyCXkvLu_x0fZSLJdrg,37895\naiohttp/http_websocket.py,sha256=b9kBmxPLPFQP_nu_sMhIMIeqDOm0ug8G4prbrhEMHZ0,878\naiohttp/http_writer.py,sha256=jA_aJW7JdH1mihrIYdJcLOHVKQ4Agg3g993v50eITBs,12824\naiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333\naiohttp/multipart.py,sha256=vkr80clTCnYrasuCeZYB2fX9p9KPe8KWmn6nyhnzQHA,41010\naiohttp/payload.py,sha256=IBpXQMv67pEnBNKjGqnutOI5AHrMCvk1OpUcJ24VDQo,40865\naiohttp/payload_streamer.py,sha256=K0iV85iW0vEG3rDkcopruidspynzQvrwW8mJvgPHisg,2289\naiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8\naiohttp/pytest_plugin.py,sha256=ymhjbYHz2Kf0ZU_4Ly0hAp73dhsgrQIzJDo4Aot3_TI,13345\naiohttp/r
esolver.py,sha256=ePJgZAN5EQY4YuFiuZmVZM6p3UuzJ4qMWM1fu8DJ2Fc,10305\naiohttp/streams.py,sha256=B4LngNMnKyAyvySvm2Pnp_VKT3yRL2QVhn4dlFvqH7M,23056\naiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998\naiohttp/test_utils.py,sha256=zFWAb-rPz1fWRUHnrjnfUH7ORlfIgZ2UZbEGe4YTa9I,23790\naiohttp/tracing.py,sha256=c3C8lnLZ0G1Jj3Iv1GgV-Op8PwcM4m6d931w502hSgI,15607\naiohttp/typedefs.py,sha256=Sx5v2yUyLu8nbabqtJRWj1M1_uW0IZACu78uYD7LBy0,1726\naiohttp/web.py,sha256=ljZAv8EVAddrWuF3qp39KdUyRTUOdrTgSC4xmaC9kaQ,18995\naiohttp/web_app.py,sha256=SQz_CL3JflkiK7o-paVsFak-Olqk9FICOBOzvg4UUc8,20130\naiohttp/web_exceptions.py,sha256=itNRhCMDJFhnMWftr5SyTsoqh-i0n9rzTj0sjcAEUjo,10812\naiohttp/web_fileresponse.py,sha256=QSuIjTA00la-V1EDWzERi9o1krzdvSPLwZmmw73FJtQ,16892\naiohttp/web_log.py,sha256=G5ugloW9noUxPft0SmVWOXw30MviL6rqZc3XrKN_T1U,8081\naiohttp/web_middlewares.py,sha256=mM2-R8eaV2r6Mi9Zc2bDG8QnhE9h0IzPvtDX_fkKR5s,4286\naiohttp/web_protocol.py,sha256=x1GlB6jqPou3QZyMKpKVLdyETwUTIJ-AbesXDEWxKKY,27807\naiohttp/web_request.py,sha256=0oHeOBD0KgXEKhNDLGs1-hDUwgpdPe7mP97mKqSgclU,30749\naiohttp/web_response.py,sha256=Ykb4wQWV0ZS8B1SfayLF56r074Ffvsykvag-l6hX-1A,30198\naiohttp/web_routedef.py,sha256=XC10f57Q36JmYaaQqrecsyfIxHMepCKaKkBEB7hLzJI,6324\naiohttp/web_runner.py,sha256=zyVYVzCgnopiGwnIhKlNZHtLV_IYQ9aC-Vm43j_HRoA,12185\naiohttp/web_server.py,sha256=RZSWt_Mj-Lu89bFYsr_T3rjxW2VNN7PHNJ2mvv2qELs,2972\naiohttp/web_urldispatcher.py,sha256=PPzAeo1CBcKLw6gl5yXOG7ScybdmLftuhPpa5KK4fyk,45303\naiohttp/web_ws.py,sha256=VXHGDtfy_jrBByLvuhnL-A_PmpcoT_ZLyYdj_EcL3Hw,23370\naiohttp/worker.py,sha256=N_9iyS_tR9U0pf3BRaIH2nzA1pjN1Xfi2gGmRrMhnho,8407\n
.venv\Lib\site-packages\aiohttp-3.12.13.dist-info\RECORD
RECORD
Other
9,662
0.7
0
0
vue-tools
662
2023-09-27T12:49:48.341533
MIT
false
853139114da563a9860081142164d178
aiohttp\n
.venv\Lib\site-packages\aiohttp-3.12.13.dist-info\top_level.txt
top_level.txt
Other
8
0.5
0
0
node-utils
537
2023-11-06T08:33:27.216504
BSD-3-Clause
false
3f2e08e98e2f74684b61aa4ac7d5a037
Wheel-Version: 1.0\nGenerator: setuptools (80.9.0)\nRoot-Is-Purelib: false\nTag: cp313-cp313-win_amd64\n\n
.venv\Lib\site-packages\aiohttp-3.12.13.dist-info\WHEEL
WHEEL
Other
101
0.7
0
0
node-utils
977
2024-09-18T10:16:04.926792
BSD-3-Clause
false
eb6c9e665bbbd698545236600675f165
Copyright aio-libs contributors.\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n
.venv\Lib\site-packages\aiohttp-3.12.13.dist-info\licenses\LICENSE.txt
LICENSE.txt
Other
601
0.95
0.076923
0
python-kit
932
2023-07-15T07:44:29.957151
BSD-3-Clause
false
748073912af33aa59430d3702aa32d41
\n\n
.venv\Lib\site-packages\aiosignal\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
2,725
0.95
0.02381
0.026316
python-kit
103
2025-02-20T21:59:17.092019
MIT
false
e986c8eba3ade290ee77e7db0f797d6c
pip\n
.venv\Lib\site-packages\aiosignal-1.4.0.dist-info\INSTALLER
INSTALLER
Other
4
0.5
0
0
vue-tools
76
2024-02-03T07:59:25.370770
MIT
false
365c9bfeb7d89244f2ce01c1de44cb85
Metadata-Version: 2.4\nName: aiosignal\nVersion: 1.4.0\nSummary: aiosignal: a list of registered asynchronous callbacks\nHome-page: https://github.com/aio-libs/aiosignal\nMaintainer: aiohttp team <team@aiohttp.org>\nMaintainer-email: team@aiohttp.org\nLicense: Apache 2.0\nProject-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby\nProject-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions\nProject-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal\nProject-URL: Docs: RTD, https://docs.aiosignal.org\nProject-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues\nProject-URL: GitHub: repo, https://github.com/aio-libs/aiosignal\nClassifier: License :: OSI Approved :: Apache Software License\nClassifier: Intended Audience :: Developers\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3 :: Only\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Operating System :: POSIX\nClassifier: Operating System :: MacOS :: MacOS X\nClassifier: Operating System :: Microsoft :: Windows\nClassifier: Framework :: AsyncIO\nRequires-Python: >=3.9\nDescription-Content-Type: text/x-rst\nLicense-File: LICENSE\nRequires-Dist: frozenlist>=1.1.0\nRequires-Dist: typing-extensions>=4.2; python_version < "3.13"\nDynamic: license-file\n\n=========\naiosignal\n=========\n\n.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg\n :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI\n :alt: GitHub status for master branch\n\n.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg?flag=pytest\n :target: https://codecov.io/gh/aio-libs/aiosignal?flags[0]=pytest\n :alt: codecov.io status for master branch\n\n.. image:: https://badge.fury.io/py/aiosignal.svg\n :target: https://pypi.org/project/aiosignal\n :alt: Latest PyPI package version\n\n.. 
image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest\n :target: https://aiosignal.readthedocs.io/\n :alt: Latest Read The Docs\n\n.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F\n :target: https://aio-libs.discourse.group/\n :alt: Discourse group for io-libs\n\n.. image:: https://badges.gitter.im/Join%20Chat.svg\n :target: https://gitter.im/aio-libs/Lobby\n :alt: Chat on Gitter\n\nIntroduction\n============\n\nA project to manage callbacks in `asyncio` projects.\n\n``Signal`` is a list of registered asynchronous callbacks.\n\nThe signal's life-cycle has two stages: after creation its content\ncould be filled by using standard list operations: ``sig.append()``\netc.\n\nAfter you call ``sig.freeze()`` the signal is *frozen*: adding, removing\nand dropping callbacks is forbidden.\n\nThe only available operation is calling the previously registered\ncallbacks by using ``await sig.send(data)``.\n\nFor concrete usage examples see the `Signals\n<https://docs.aiohttp.org/en/stable/web_advanced.html#aiohttp-web-signals>\nsection of the `Web Server Advanced\n<https://docs.aiohttp.org/en/stable/web_advanced.html>` chapter of the `aiohttp\ndocumentation`_.\n\n\nInstallation\n------------\n\n::\n\n $ pip install aiosignal\n\n\nDocumentation\n=============\n\nhttps://aiosignal.readthedocs.io/\n\nLicense\n=======\n\n``aiosignal`` is offered under the Apache 2 license.\n\nSource code\n===========\n\nThe project is hosted on GitHub_\n\nPlease file an issue in the `bug tracker\n<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug\nor have some suggestions to improve the library.\n\n.. _GitHub: https://github.com/aio-libs/aiosignal\n.. _aiohttp documentation: https://docs.aiohttp.org/\n
.venv\Lib\site-packages\aiosignal-1.4.0.dist-info\METADATA
METADATA
Other
3,662
0.8
0.035714
0
vue-tools
989
2024-05-01T20:31:05.014181
MIT
false
3ccae9c0532640c4c716808af49aecfd
aiosignal-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\naiosignal-1.4.0.dist-info/METADATA,sha256=CSR-8dqLxpZyjUcTDnAuQwf299EB1sSFv_nzpxznAI0,3662\naiosignal-1.4.0.dist-info/RECORD,,\naiosignal-1.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91\naiosignal-1.4.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332\naiosignal-1.4.0.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10\naiosignal/__init__.py,sha256=TIkmUG9HTBt4dfq2nISYBiZiRB2xwvFtEZydLP0HPL4,1537\naiosignal/__pycache__/__init__.cpython-313.pyc,,\naiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\n
.venv\Lib\site-packages\aiosignal-1.4.0.dist-info\RECORD
RECORD
Other
703
0.7
0
0
python-kit
156
2025-05-09T09:15:45.699363
BSD-3-Clause
false
e49259a7628159868e7f100f0724107b
aiosignal\n
.venv\Lib\site-packages\aiosignal-1.4.0.dist-info\top_level.txt
top_level.txt
Other
10
0.5
0
0
python-kit
202
2024-09-06T22:25:19.333686
MIT
false
f23d72ede8fa99a9f65f5004bd3f1f0a
Wheel-Version: 1.0\nGenerator: setuptools (80.9.0)\nRoot-Is-Purelib: true\nTag: py3-none-any\n\n
.venv\Lib\site-packages\aiosignal-1.4.0.dist-info\WHEEL
WHEEL
Other
91
0.5
0
0
python-kit
163
2025-02-04T14:42:17.784938
Apache-2.0
false
08dd01ac2afdbb287cc668d51c7056c8
Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "{}"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright 2013-2019 Nikolay Kim and Andrew Svetlov\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n
.venv\Lib\site-packages\aiosignal-1.4.0.dist-info\licenses\LICENSE
LICENSE
Other
11,332
0.95
0.119403
0
react-lib
126
2024-04-13T21:35:14.762217
Apache-2.0
false
cf056e8e7a0a5477451af18b7b5aa98c