content
stringlengths 1
103k
⌀ | path
stringlengths 8
216
| filename
stringlengths 2
179
| language
stringclasses 15
values | size_bytes
int64 2
189k
| quality_score
float64 0.5
0.95
| complexity
float64 0
1
| documentation_ratio
float64 0
1
| repository
stringclasses 5
values | stars
int64 0
1k
| created_date
stringdate 2023-07-10 19:21:08
2025-07-09 19:11:45
| license
stringclasses 4
values | is_test
bool 2
classes | file_hash
stringlengths 32
32
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
_argon2_cffi_bindings/__init__.py,sha256=Gm_s_NmP0nyKhjkqPk-5gq6aluEEO6YziCY8cZzs30k,92\n_argon2_cffi_bindings/__pycache__/__init__.cpython-313.pyc,,\n_argon2_cffi_bindings/__pycache__/_ffi_build.cpython-313.pyc,,\n_argon2_cffi_bindings/_ffi.pyd,sha256=LHT5PPJFHMvV7q8LTQd26yhWzVilzHJ5AxvmpiqD568,53248\n_argon2_cffi_bindings/_ffi_build.py,sha256=GWyfVuQ-48rPhhdo2rjDc_tr1QYsRo2fPTqYzr_5xSk,6316\nargon2_cffi_bindings-21.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nargon2_cffi_bindings-21.2.0.dist-info/LICENSE,sha256=WrxzxDB5_M_mxNxxCvLftKWaZTmxQ0I29u-MdbAVDWE,1103\nargon2_cffi_bindings-21.2.0.dist-info/METADATA,sha256=Zoe40TTiS4EwkPh7nbOWc1pHzu6HMeQCGTjsV17A_Mk,6705\nargon2_cffi_bindings-21.2.0.dist-info/RECORD,,\nargon2_cffi_bindings-21.2.0.dist-info/WHEEL,sha256=g0Gp_9X_YiwOuaF1hZMQNaS1qKUg2WIXRJjlZWggGSw,100\nargon2_cffi_bindings-21.2.0.dist-info/top_level.txt,sha256=WyRJzxvIv58cyvTye2AsVz50Lw0hDxUYBuCH1rUb_tg,27\n | .venv\Lib\site-packages\argon2_cffi_bindings-21.2.0.dist-info\RECORD | RECORD | Other | 961 | 0.7 | 0 | 0 | vue-tools | 921 | 2023-08-04T03:21:34.001834 | BSD-3-Clause | false | 37299219b3a55aae865ac545b1992f27 |
_argon2_cffi_bindings\n_ffi\n | .venv\Lib\site-packages\argon2_cffi_bindings-21.2.0.dist-info\top_level.txt | top_level.txt | Other | 27 | 0.5 | 0 | 0 | python-kit | 799 | 2023-08-06T06:11:25.781631 | Apache-2.0 | false | 66eca871f9412fae4ecf69ccbb621aef |
Wheel-Version: 1.0\nGenerator: bdist_wheel (0.37.0)\nRoot-Is-Purelib: false\nTag: cp36-abi3-win_amd64\n\n | .venv\Lib\site-packages\argon2_cffi_bindings-21.2.0.dist-info\WHEEL | WHEEL | Other | 100 | 0.7 | 0 | 0 | vue-tools | 577 | 2023-08-04T15:16:10.589262 | GPL-3.0 | false | 81473bb8dd3c8c2fab84df8d7fe8e9fb |
"""\nProvides the default implementation of :class:`ArrowFactory <arrow.factory.ArrowFactory>`\nmethods for use as a module API.\n\n"""\n\nfrom datetime import date, datetime\nfrom datetime import tzinfo as dt_tzinfo\nfrom time import struct_time\nfrom typing import Any, List, Optional, Tuple, Type, Union, overload\n\nfrom arrow.arrow import TZ_EXPR, Arrow\nfrom arrow.constants import DEFAULT_LOCALE\nfrom arrow.factory import ArrowFactory\n\n# internal default factory.\n_factory = ArrowFactory()\n\n# TODO: Use Positional Only Argument (https://www.python.org/dev/peps/pep-0570/)\n# after Python 3.7 deprecation\n\n\n@overload\ndef get(\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n) -> Arrow:\n ... # pragma: no cover\n\n\n@overload\ndef get(\n *args: int,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n) -> Arrow:\n ... # pragma: no cover\n\n\n@overload\ndef get(\n __obj: Union[\n Arrow,\n datetime,\n date,\n struct_time,\n dt_tzinfo,\n int,\n float,\n str,\n Tuple[int, int, int],\n ],\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n) -> Arrow:\n ... # pragma: no cover\n\n\n@overload\ndef get(\n __arg1: Union[datetime, date],\n __arg2: TZ_EXPR,\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n) -> Arrow:\n ... # pragma: no cover\n\n\n@overload\ndef get(\n __arg1: str,\n __arg2: Union[str, List[str]],\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n) -> Arrow:\n ... 
# pragma: no cover\n\n\ndef get(*args: Any, **kwargs: Any) -> Arrow:\n """Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``get`` method."""\n\n return _factory.get(*args, **kwargs)\n\n\nget.__doc__ = _factory.get.__doc__\n\n\ndef utcnow() -> Arrow:\n """Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``utcnow`` method."""\n\n return _factory.utcnow()\n\n\nutcnow.__doc__ = _factory.utcnow.__doc__\n\n\ndef now(tz: Optional[TZ_EXPR] = None) -> Arrow:\n """Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``now`` method."""\n\n return _factory.now(tz)\n\n\nnow.__doc__ = _factory.now.__doc__\n\n\ndef factory(type: Type[Arrow]) -> ArrowFactory:\n """Returns an :class:`.ArrowFactory` for the specified :class:`Arrow <arrow.arrow.Arrow>`\n or derived type.\n\n :param type: the type, :class:`Arrow <arrow.arrow.Arrow>` or derived.\n\n """\n\n return ArrowFactory(type)\n\n\n__all__ = ["get", "utcnow", "now", "factory"]\n | .venv\Lib\site-packages\arrow\api.py | api.py | Python | 2,755 | 0.95 | 0.142857 | 0.089888 | python-kit | 141 | 2024-12-30T10:38:20.921188 | GPL-3.0 | false | e52f968787adbae9f01bb313485ec309 |
"""\nImplements the :class:`ArrowFactory <arrow.factory.ArrowFactory>` class,\nproviding factory methods for common :class:`Arrow <arrow.arrow.Arrow>`\nconstruction scenarios.\n\n"""\n\n\nimport calendar\nfrom datetime import date, datetime\nfrom datetime import tzinfo as dt_tzinfo\nfrom decimal import Decimal\nfrom time import struct_time\nfrom typing import Any, List, Optional, Tuple, Type, Union, overload\n\nfrom dateutil import tz as dateutil_tz\n\nfrom arrow import parser\nfrom arrow.arrow import TZ_EXPR, Arrow\nfrom arrow.constants import DEFAULT_LOCALE\nfrom arrow.util import is_timestamp, iso_to_gregorian\n\n\nclass ArrowFactory:\n """A factory for generating :class:`Arrow <arrow.arrow.Arrow>` objects.\n\n :param type: (optional) the :class:`Arrow <arrow.arrow.Arrow>`-based class to construct from.\n Defaults to :class:`Arrow <arrow.arrow.Arrow>`.\n\n """\n\n type: Type[Arrow]\n\n def __init__(self, type: Type[Arrow] = Arrow) -> None:\n self.type = type\n\n @overload\n def get(\n self,\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n ) -> Arrow:\n ... # pragma: no cover\n\n @overload\n def get(\n self,\n __obj: Union[\n Arrow,\n datetime,\n date,\n struct_time,\n dt_tzinfo,\n int,\n float,\n str,\n Tuple[int, int, int],\n ],\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n ) -> Arrow:\n ... # pragma: no cover\n\n @overload\n def get(\n self,\n __arg1: Union[datetime, date],\n __arg2: TZ_EXPR,\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n ) -> Arrow:\n ... # pragma: no cover\n\n @overload\n def get(\n self,\n __arg1: str,\n __arg2: Union[str, List[str]],\n *,\n locale: str = DEFAULT_LOCALE,\n tzinfo: Optional[TZ_EXPR] = None,\n normalize_whitespace: bool = False,\n ) -> Arrow:\n ... 
# pragma: no cover\n\n def get(self, *args: Any, **kwargs: Any) -> Arrow:\n """Returns an :class:`Arrow <arrow.arrow.Arrow>` object based on flexible inputs.\n\n :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en-us'.\n :param tzinfo: (optional) a :ref:`timezone expression <tz-expr>` or tzinfo object.\n Replaces the timezone unless using an input form that is explicitly UTC or specifies\n the timezone in a positional argument. Defaults to UTC.\n :param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize\n redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing.\n Defaults to false.\n\n Usage::\n\n >>> import arrow\n\n **No inputs** to get current UTC time::\n\n >>> arrow.get()\n <Arrow [2013-05-08T05:51:43.316458+00:00]>\n\n **One** :class:`Arrow <arrow.arrow.Arrow>` object, to get a copy.\n\n >>> arw = arrow.utcnow()\n >>> arrow.get(arw)\n <Arrow [2013-10-23T15:21:54.354846+00:00]>\n\n **One** ``float`` or ``int``, convertible to a floating-point timestamp, to get\n that timestamp in UTC::\n\n >>> arrow.get(1367992474.293378)\n <Arrow [2013-05-08T05:54:34.293378+00:00]>\n\n >>> arrow.get(1367992474)\n <Arrow [2013-05-08T05:54:34+00:00]>\n\n **One** ISO 8601-formatted ``str``, to parse it::\n\n >>> arrow.get('2013-09-29T01:26:43.830580')\n <Arrow [2013-09-29T01:26:43.830580+00:00]>\n\n **One** ISO 8601-formatted ``str``, in basic format, to parse it::\n\n >>> arrow.get('20160413T133656.456289')\n <Arrow [2016-04-13T13:36:56.456289+00:00]>\n\n **One** ``tzinfo``, to get the current time **converted** to that timezone::\n\n >>> arrow.get(tz.tzlocal())\n <Arrow [2013-05-07T22:57:28.484717-07:00]>\n\n **One** naive ``datetime``, to get that datetime in UTC::\n\n >>> arrow.get(datetime(2013, 5, 5))\n <Arrow [2013-05-05T00:00:00+00:00]>\n\n **One** aware ``datetime``, to get that datetime::\n\n >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal()))\n <Arrow 
[2013-05-05T00:00:00-07:00]>\n\n **One** naive ``date``, to get that date in UTC::\n\n >>> arrow.get(date(2013, 5, 5))\n <Arrow [2013-05-05T00:00:00+00:00]>\n\n **One** time.struct time::\n\n >>> arrow.get(gmtime(0))\n <Arrow [1970-01-01T00:00:00+00:00]>\n\n **One** iso calendar ``tuple``, to get that week date in UTC::\n\n >>> arrow.get((2013, 18, 7))\n <Arrow [2013-05-05T00:00:00+00:00]>\n\n **Two** arguments, a naive or aware ``datetime``, and a replacement\n :ref:`timezone expression <tz-expr>`::\n\n >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')\n <Arrow [2013-05-05T00:00:00-07:00]>\n\n **Two** arguments, a naive ``date``, and a replacement\n :ref:`timezone expression <tz-expr>`::\n\n >>> arrow.get(date(2013, 5, 5), 'US/Pacific')\n <Arrow [2013-05-05T00:00:00-07:00]>\n\n **Two** arguments, both ``str``, to parse the first according to the format of the second::\n\n >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ')\n <Arrow [2013-05-05T12:30:45-05:00]>\n\n **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try::\n\n >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])\n <Arrow [2013-05-05T12:30:45+00:00]>\n\n **Three or more** arguments, as for the direct constructor of an ``Arrow`` object::\n\n >>> arrow.get(2013, 5, 5, 12, 30, 45)\n <Arrow [2013-05-05T12:30:45+00:00]>\n\n """\n\n arg_count = len(args)\n locale = kwargs.pop("locale", DEFAULT_LOCALE)\n tz = kwargs.get("tzinfo", None)\n normalize_whitespace = kwargs.pop("normalize_whitespace", False)\n\n # if kwargs given, send to constructor unless only tzinfo provided\n if len(kwargs) > 1:\n arg_count = 3\n\n # tzinfo kwarg is not provided\n if len(kwargs) == 1 and tz is None:\n arg_count = 3\n\n # () -> now, @ tzinfo or utc\n if arg_count == 0:\n if isinstance(tz, str):\n tz = parser.TzinfoParser.parse(tz)\n return self.type.now(tzinfo=tz)\n\n if isinstance(tz, dt_tzinfo):\n return self.type.now(tzinfo=tz)\n\n return 
self.type.utcnow()\n\n if arg_count == 1:\n arg = args[0]\n if isinstance(arg, Decimal):\n arg = float(arg)\n\n # (None) -> raises an exception\n if arg is None:\n raise TypeError("Cannot parse argument of type None.")\n\n # try (int, float) -> from timestamp @ tzinfo\n elif not isinstance(arg, str) and is_timestamp(arg):\n if tz is None:\n # set to UTC by default\n tz = dateutil_tz.tzutc()\n return self.type.fromtimestamp(arg, tzinfo=tz)\n\n # (Arrow) -> from the object's datetime @ tzinfo\n elif isinstance(arg, Arrow):\n return self.type.fromdatetime(arg.datetime, tzinfo=tz)\n\n # (datetime) -> from datetime @ tzinfo\n elif isinstance(arg, datetime):\n return self.type.fromdatetime(arg, tzinfo=tz)\n\n # (date) -> from date @ tzinfo\n elif isinstance(arg, date):\n return self.type.fromdate(arg, tzinfo=tz)\n\n # (tzinfo) -> now @ tzinfo\n elif isinstance(arg, dt_tzinfo):\n return self.type.now(tzinfo=arg)\n\n # (str) -> parse @ tzinfo\n elif isinstance(arg, str):\n dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace)\n return self.type.fromdatetime(dt, tzinfo=tz)\n\n # (struct_time) -> from struct_time\n elif isinstance(arg, struct_time):\n return self.type.utcfromtimestamp(calendar.timegm(arg))\n\n # (iso calendar) -> convert then from date @ tzinfo\n elif isinstance(arg, tuple) and len(arg) == 3:\n d = iso_to_gregorian(*arg)\n return self.type.fromdate(d, tzinfo=tz)\n\n else:\n raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")\n\n elif arg_count == 2:\n arg_1, arg_2 = args[0], args[1]\n\n if isinstance(arg_1, datetime):\n # (datetime, tzinfo/str) -> fromdatetime @ tzinfo\n if isinstance(arg_2, (dt_tzinfo, str)):\n return self.type.fromdatetime(arg_1, tzinfo=arg_2)\n else:\n raise TypeError(\n f"Cannot parse two arguments of types 'datetime', {type(arg_2)!r}."\n )\n\n elif isinstance(arg_1, date):\n # (date, tzinfo/str) -> fromdate @ tzinfo\n if isinstance(arg_2, (dt_tzinfo, str)):\n return self.type.fromdate(arg_1, 
tzinfo=arg_2)\n else:\n raise TypeError(\n f"Cannot parse two arguments of types 'date', {type(arg_2)!r}."\n )\n\n # (str, format) -> parse @ tzinfo\n elif isinstance(arg_1, str) and isinstance(arg_2, (str, list)):\n dt = parser.DateTimeParser(locale).parse(\n args[0], args[1], normalize_whitespace\n )\n return self.type.fromdatetime(dt, tzinfo=tz)\n\n else:\n raise TypeError(\n f"Cannot parse two arguments of types {type(arg_1)!r} and {type(arg_2)!r}."\n )\n\n # 3+ args -> datetime-like via constructor\n else:\n return self.type(*args, **kwargs)\n\n def utcnow(self) -> Arrow:\n """Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC time.\n\n Usage::\n\n >>> import arrow\n >>> arrow.utcnow()\n <Arrow [2013-05-08T05:19:07.018993+00:00]>\n """\n\n return self.type.utcnow()\n\n def now(self, tz: Optional[TZ_EXPR] = None) -> Arrow:\n """Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in the given\n timezone.\n\n :param tz: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to local time.\n\n Usage::\n\n >>> import arrow\n >>> arrow.now()\n <Arrow [2013-05-07T22:19:11.363410-07:00]>\n\n >>> arrow.now('US/Pacific')\n <Arrow [2013-05-07T22:19:15.251821-07:00]>\n\n >>> arrow.now('+02:00')\n <Arrow [2013-05-08T07:19:25.618646+02:00]>\n\n >>> arrow.now('local')\n <Arrow [2013-05-07T22:19:39.130059-07:00]>\n """\n\n if tz is None:\n tz = dateutil_tz.tzlocal()\n elif not isinstance(tz, dt_tzinfo):\n tz = parser.TzinfoParser.parse(tz)\n\n return self.type.now(tz)\n | .venv\Lib\site-packages\arrow\factory.py | factory.py | Python | 11,432 | 0.95 | 0.115942 | 0.144531 | react-lib | 620 | 2023-09-27T12:36:59.212620 | MIT | false | e2f04a056b39754fbcda68633da8ad69 |
"""Provides the :class:`Arrow <arrow.formatter.DateTimeFormatter>` class, an improved formatter for datetimes."""\n\nimport re\nimport sys\nfrom datetime import datetime, timedelta\nfrom typing import Optional, Pattern, cast\n\nfrom dateutil import tz as dateutil_tz\n\nfrom arrow import locales\nfrom arrow.constants import DEFAULT_LOCALE\n\nif sys.version_info < (3, 8): # pragma: no cover\n from typing_extensions import Final\nelse:\n from typing import Final # pragma: no cover\n\n\nFORMAT_ATOM: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"\nFORMAT_COOKIE: Final[str] = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ"\nFORMAT_RFC822: Final[str] = "ddd, DD MMM YY HH:mm:ss Z"\nFORMAT_RFC850: Final[str] = "dddd, DD-MMM-YY HH:mm:ss ZZZ"\nFORMAT_RFC1036: Final[str] = "ddd, DD MMM YY HH:mm:ss Z"\nFORMAT_RFC1123: Final[str] = "ddd, DD MMM YYYY HH:mm:ss Z"\nFORMAT_RFC2822: Final[str] = "ddd, DD MMM YYYY HH:mm:ss Z"\nFORMAT_RFC3339: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"\nFORMAT_RSS: Final[str] = "ddd, DD MMM YYYY HH:mm:ss Z"\nFORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"\n\n\nclass DateTimeFormatter:\n # This pattern matches characters enclosed in square brackets are matched as\n # an atomic group. 
For more info on atomic groups and how to they are\n # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578\n\n _FORMAT_RE: Final[Pattern[str]] = re.compile(\n r"(\[(?:(?=(?P<literal>[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)"\n )\n\n locale: locales.Locale\n\n def __init__(self, locale: str = DEFAULT_LOCALE) -> None:\n self.locale = locales.get_locale(locale)\n\n def format(cls, dt: datetime, fmt: str) -> str:\n # FIXME: _format_token() is nullable\n return cls._FORMAT_RE.sub(\n lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt\n )\n\n def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:\n if token and token.startswith("[") and token.endswith("]"):\n return token[1:-1]\n\n if token == "YYYY":\n return self.locale.year_full(dt.year)\n if token == "YY":\n return self.locale.year_abbreviation(dt.year)\n\n if token == "MMMM":\n return self.locale.month_name(dt.month)\n if token == "MMM":\n return self.locale.month_abbreviation(dt.month)\n if token == "MM":\n return f"{dt.month:02d}"\n if token == "M":\n return f"{dt.month}"\n\n if token == "DDDD":\n return f"{dt.timetuple().tm_yday:03d}"\n if token == "DDD":\n return f"{dt.timetuple().tm_yday}"\n if token == "DD":\n return f"{dt.day:02d}"\n if token == "D":\n return f"{dt.day}"\n\n if token == "Do":\n return self.locale.ordinal_number(dt.day)\n\n if token == "dddd":\n return self.locale.day_name(dt.isoweekday())\n if token == "ddd":\n return self.locale.day_abbreviation(dt.isoweekday())\n if token == "d":\n return f"{dt.isoweekday()}"\n\n if token == "HH":\n return f"{dt.hour:02d}"\n if token == "H":\n return f"{dt.hour}"\n if token == "hh":\n return f"{dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12):02d}"\n if token == "h":\n return f"{dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)}"\n\n if token == "mm":\n return f"{dt.minute:02d}"\n if token == "m":\n return f"{dt.minute}"\n\n if token 
== "ss":\n return f"{dt.second:02d}"\n if token == "s":\n return f"{dt.second}"\n\n if token == "SSSSSS":\n return f"{dt.microsecond:06d}"\n if token == "SSSSS":\n return f"{dt.microsecond // 10:05d}"\n if token == "SSSS":\n return f"{dt.microsecond // 100:04d}"\n if token == "SSS":\n return f"{dt.microsecond // 1000:03d}"\n if token == "SS":\n return f"{dt.microsecond // 10000:02d}"\n if token == "S":\n return f"{dt.microsecond // 100000}"\n\n if token == "X":\n return f"{dt.timestamp()}"\n\n if token == "x":\n return f"{dt.timestamp() * 1_000_000:.0f}"\n\n if token == "ZZZ":\n return dt.tzname()\n\n if token in ["ZZ", "Z"]:\n separator = ":" if token == "ZZ" else ""\n tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo\n # `dt` must be aware object. Otherwise, this line will raise AttributeError\n # https://github.com/arrow-py/arrow/pull/883#discussion_r529866834\n # datetime awareness: https://docs.python.org/3/library/datetime.html#aware-and-naive-objects\n total_minutes = int(cast(timedelta, tz.utcoffset(dt)).total_seconds() / 60)\n\n sign = "+" if total_minutes >= 0 else "-"\n total_minutes = abs(total_minutes)\n hour, minute = divmod(total_minutes, 60)\n\n return f"{sign}{hour:02d}{separator}{minute:02d}"\n\n if token in ("a", "A"):\n return self.locale.meridian(dt.hour, token)\n\n if token == "W":\n year, week, day = dt.isocalendar()\n return f"{year}-W{week:02d}-{day}"\n | .venv\Lib\site-packages\arrow\formatter.py | formatter.py | Python | 5,267 | 0.95 | 0.324324 | 0.059322 | react-lib | 194 | 2025-02-14T04:49:25.025155 | Apache-2.0 | false | 3c464ad5e11bfeee09471ef9af5f4738 |
"""Helpful functions used internally within arrow."""\n\nimport datetime\nfrom typing import Any, Optional, cast\n\nfrom dateutil.rrule import WEEKLY, rrule\n\nfrom arrow.constants import (\n MAX_ORDINAL,\n MAX_TIMESTAMP,\n MAX_TIMESTAMP_MS,\n MAX_TIMESTAMP_US,\n MIN_ORDINAL,\n)\n\n\ndef next_weekday(\n start_date: Optional[datetime.date], weekday: int\n) -> datetime.datetime:\n """Get next weekday from the specified start date.\n\n :param start_date: Datetime object representing the start date.\n :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday).\n :return: Datetime object corresponding to the next weekday after start_date.\n\n Usage::\n\n # Get first Monday after epoch\n >>> next_weekday(datetime(1970, 1, 1), 0)\n 1970-01-05 00:00:00\n\n # Get first Thursday after epoch\n >>> next_weekday(datetime(1970, 1, 1), 3)\n 1970-01-01 00:00:00\n\n # Get first Sunday after epoch\n >>> next_weekday(datetime(1970, 1, 1), 6)\n 1970-01-04 00:00:00\n """\n if weekday < 0 or weekday > 6:\n raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).")\n return cast(\n datetime.datetime,\n rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0],\n )\n\n\ndef is_timestamp(value: Any) -> bool:\n """Check if value is a valid timestamp."""\n if isinstance(value, bool):\n return False\n if not isinstance(value, (int, float, str)):\n return False\n try:\n float(value)\n return True\n except ValueError:\n return False\n\n\ndef validate_ordinal(value: Any) -> None:\n """Raise an exception if value is an invalid Gregorian ordinal.\n\n :param value: the input to be checked\n\n """\n if isinstance(value, bool) or not isinstance(value, int):\n raise TypeError(f"Ordinal must be an integer (got type {type(value)}).")\n if not (MIN_ORDINAL <= value <= MAX_ORDINAL):\n raise ValueError(f"Ordinal {value} is out of range.")\n\n\ndef normalize_timestamp(timestamp: float) -> float:\n """Normalize millisecond and microsecond timestamps into 
normal timestamps."""\n if timestamp > MAX_TIMESTAMP:\n if timestamp < MAX_TIMESTAMP_MS:\n timestamp /= 1000\n elif timestamp < MAX_TIMESTAMP_US:\n timestamp /= 1_000_000\n else:\n raise ValueError(f"The specified timestamp {timestamp!r} is too large.")\n return timestamp\n\n\n# Credit to https://stackoverflow.com/a/1700069\ndef iso_to_gregorian(iso_year: int, iso_week: int, iso_day: int) -> datetime.date:\n """Converts an ISO week date into a datetime object.\n\n :param iso_year: the year\n :param iso_week: the week number, each year has either 52 or 53 weeks\n :param iso_day: the day numbered 1 through 7, beginning with Monday\n\n """\n\n if not 1 <= iso_week <= 53:\n raise ValueError("ISO Calendar week value must be between 1-53.")\n\n if not 1 <= iso_day <= 7:\n raise ValueError("ISO Calendar day value must be between 1-7")\n\n # The first week of the year always contains 4 Jan.\n fourth_jan = datetime.date(iso_year, 1, 4)\n delta = datetime.timedelta(fourth_jan.isoweekday() - 1)\n year_start = fourth_jan - delta\n gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)\n\n return gregorian\n\n\ndef validate_bounds(bounds: str) -> None:\n if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]":\n raise ValueError(\n "Invalid bounds. Please select between '()', '(]', '[)', or '[]'."\n )\n\n\n__all__ = ["next_weekday", "is_timestamp", "validate_ordinal", "iso_to_gregorian"]\n | .venv\Lib\site-packages\arrow\util.py | util.py | Python | 3,679 | 0.95 | 0.162393 | 0.057471 | node-utils | 773 | 2025-01-30T13:49:16.603605 | Apache-2.0 | false | 3960cac2a8fcf79b00bd4fd38249e82e |
__version__ = "1.3.0"\n | .venv\Lib\site-packages\arrow\_version.py | _version.py | Python | 22 | 0.5 | 0 | 0 | vue-tools | 361 | 2024-02-14T06:28:39.748153 | MIT | false | af06e7adc305e1d0a7751f604a3b1a99 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\api.cpython-313.pyc | api.cpython-313.pyc | Other | 3,729 | 0.8 | 0.125 | 0 | node-utils | 276 | 2023-12-22T22:12:17.114748 | GPL-3.0 | false | 4ef3fef3e38da0e67aedcc7b59adca9f |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\arrow.cpython-313.pyc | arrow.cpython-313.pyc | Other | 68,704 | 0.75 | 0.051591 | 0.011653 | node-utils | 456 | 2024-10-19T06:04:44.359324 | MIT | false | 1622dd6107bff4756f7fa626db573128 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\constants.cpython-313.pyc | constants.cpython-313.pyc | Other | 2,223 | 0.8 | 0 | 0 | python-kit | 483 | 2024-09-17T04:16:22.760067 | MIT | false | 021856f3739e00d296a94d961bf5ec52 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\factory.cpython-313.pyc | factory.cpython-313.pyc | Other | 12,030 | 0.95 | 0.078818 | 0.103896 | vue-tools | 510 | 2023-08-06T19:05:44.341236 | BSD-3-Clause | false | 7070e639966a98b0dcad21b222f75c47 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\formatter.cpython-313.pyc | formatter.cpython-313.pyc | Other | 7,402 | 0.8 | 0.037037 | 0 | react-lib | 956 | 2025-02-20T20:02:52.802700 | GPL-3.0 | false | 739787940a5c480446fa29e85d377176 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\parser.cpython-313.pyc | parser.cpython-313.pyc | Other | 25,101 | 0.95 | 0.014337 | 0 | python-kit | 257 | 2025-01-16T07:19:49.904281 | GPL-3.0 | false | 3c82e284db5acc7cd958e9b53ff1a0b6 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\util.cpython-313.pyc | util.cpython-313.pyc | Other | 4,897 | 0.95 | 0.026667 | 0.046154 | python-kit | 295 | 2024-07-26T05:16:07.002871 | Apache-2.0 | false | 6edaf206b4b22eed1891a3cce8f8d969 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\_version.cpython-313.pyc | _version.cpython-313.pyc | Other | 206 | 0.7 | 0 | 0 | python-kit | 703 | 2024-07-04T02:33:15.245834 | GPL-3.0 | false | 52829bfcd6358a6664b4376ce1ddc564 |
\n\n | .venv\Lib\site-packages\arrow\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 802 | 0.7 | 0 | 0 | node-utils | 1,000 | 2023-08-02T02:42:58.844873 | Apache-2.0 | false | c8371e80ac176a868a9fa2e47de7468b |
pip\n | .venv\Lib\site-packages\arrow-1.3.0.dist-info\INSTALLER | INSTALLER | Other | 4 | 0.5 | 0 | 0 | node-utils | 277 | 2023-07-14T06:57:31.863845 | MIT | false | 365c9bfeb7d89244f2ce01c1de44cb85 |
Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "[]"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright 2023 Chris Smith\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n | .venv\Lib\site-packages\arrow-1.3.0.dist-info\LICENSE | LICENSE | Other | 11,341 | 0.95 | 0.119403 | 0 | node-utils | 641 | 2024-02-15T10:52:17.388646 | BSD-3-Clause | false | 14a2e29a9d542fb9052d75344d67619d |
Metadata-Version: 2.1\nName: arrow\nVersion: 1.3.0\nSummary: Better dates & times for Python\nKeywords: arrow,date,time,datetime,timestamp,timezone,humanize\nAuthor-email: Chris Smith <crsmithdev@gmail.com>\nRequires-Python: >=3.8\nDescription-Content-Type: text/x-rst\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Intended Audience :: Developers\nClassifier: Intended Audience :: Information Technology\nClassifier: License :: OSI Approved :: Apache Software License\nClassifier: Topic :: Software Development :: Libraries :: Python Modules\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3 :: Only\nClassifier: Programming Language :: Python :: 3.8\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Operating System :: OS Independent\nRequires-Dist: python-dateutil>=2.7.0\nRequires-Dist: types-python-dateutil>=2.8.10\nRequires-Dist: doc8 ; extra == "doc"\nRequires-Dist: sphinx>=7.0.0 ; extra == "doc"\nRequires-Dist: sphinx-autobuild ; extra == "doc"\nRequires-Dist: sphinx-autodoc-typehints ; extra == "doc"\nRequires-Dist: sphinx_rtd_theme>=1.3.0 ; extra == "doc"\nRequires-Dist: dateparser==1.* ; extra == "test"\nRequires-Dist: pre-commit ; extra == "test"\nRequires-Dist: pytest ; extra == "test"\nRequires-Dist: pytest-cov ; extra == "test"\nRequires-Dist: pytest-mock ; extra == "test"\nRequires-Dist: pytz==2021.1 ; extra == "test"\nRequires-Dist: simplejson==3.* ; extra == "test"\nProject-URL: Documentation, https://arrow.readthedocs.io\nProject-URL: Issues, https://github.com/arrow-py/arrow/issues\nProject-URL: Source, https://github.com/arrow-py/arrow\nProvides-Extra: doc\nProvides-Extra: test\n\nArrow: Better dates & times for Python\n======================================\n\n.. start-inclusion-marker-do-not-remove\n\n.. 
image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master\n :alt: Build Status\n :target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster\n\n.. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg\n :alt: Coverage\n :target: https://codecov.io/gh/arrow-py/arrow\n\n.. image:: https://img.shields.io/pypi/v/arrow.svg\n :alt: PyPI Version\n :target: https://pypi.python.org/pypi/arrow\n\n.. image:: https://img.shields.io/pypi/pyversions/arrow.svg\n :alt: Supported Python Versions\n :target: https://pypi.python.org/pypi/arrow\n\n.. image:: https://img.shields.io/pypi/l/arrow.svg\n :alt: License\n :target: https://pypi.python.org/pypi/arrow\n\n.. image:: https://img.shields.io/badge/code%20style-black-000000.svg\n :alt: Code Style: Black\n :target: https://github.com/psf/black\n\n\n**Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. 
Simply put, it helps you work with dates and times with fewer imports and a lot less code.\n\nArrow is named after the `arrow of time <https://en.wikipedia.org/wiki/Arrow_of_time>`_ and is heavily inspired by `moment.js <https://github.com/moment/moment>`_ and `requests <https://github.com/psf/requests>`_.\n\nWhy use Arrow over built-in modules?\n------------------------------------\n\nPython's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective:\n\n- Too many modules: datetime, time, calendar, dateutil, pytz and more\n- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc.\n- Timezones and timestamp conversions are verbose and unpleasant\n- Timezone naivety is the norm\n- Gaps in functionality: ISO 8601 parsing, timespans, humanization\n\nFeatures\n--------\n\n- Fully-implemented, drop-in replacement for datetime\n- Support for Python 3.6+\n- Timezone-aware and UTC by default\n- Super-simple creation options for many common input scenarios\n- ``shift`` method with support for relative offsets, including weeks\n- Format and parse strings automatically\n- Wide support for the `ISO 8601 <https://en.wikipedia.org/wiki/ISO_8601>`_ standard\n- Timezone conversion\n- Support for ``dateutil``, ``pytz``, and ``ZoneInfo`` tzinfo objects\n- Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year\n- Humanize dates and times with a growing list of contributed locales\n- Extensible for your own Arrow-derived types\n- Full support for PEP 484-style type hints\n\nQuick Start\n-----------\n\nInstallation\n~~~~~~~~~~~~\n\nTo install Arrow, use `pip <https://pip.pypa.io/en/stable/quickstart/>`_ or `pipenv <https://docs.pipenv.org>`_:\n\n.. code-block:: console\n\n $ pip install -U arrow\n\nExample Usage\n~~~~~~~~~~~~~\n\n.. 
code-block:: python\n\n >>> import arrow\n >>> arrow.get('2013-05-11T21:23:58.970460+07:00')\n <Arrow [2013-05-11T21:23:58.970460+07:00]>\n\n >>> utc = arrow.utcnow()\n >>> utc\n <Arrow [2013-05-11T21:23:58.970460+00:00]>\n\n >>> utc = utc.shift(hours=-1)\n >>> utc\n <Arrow [2013-05-11T20:23:58.970460+00:00]>\n\n >>> local = utc.to('US/Pacific')\n >>> local\n <Arrow [2013-05-11T13:23:58.970460-07:00]>\n\n >>> local.timestamp()\n 1368303838.970460\n\n >>> local.format()\n '2013-05-11 13:23:58 -07:00'\n\n >>> local.format('YYYY-MM-DD HH:mm:ss ZZ')\n '2013-05-11 13:23:58 -07:00'\n\n >>> local.humanize()\n 'an hour ago'\n\n >>> local.humanize(locale='ko-kr')\n '한시간 전'\n\n.. end-inclusion-marker-do-not-remove\n\nDocumentation\n-------------\n\nFor full documentation, please visit `arrow.readthedocs.io <https://arrow.readthedocs.io>`_.\n\nContributing\n------------\n\nContributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing:\n\n#. Find an issue or feature to tackle on the `issue tracker <https://github.com/arrow-py/arrow/issues>`_. Issues marked with the `"good first issue" label <https://github.com/arrow-py/arrow/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22>`_ may be a great place to start!\n#. Fork `this repository <https://github.com/arrow-py/arrow>`_ on GitHub and begin making changes in a branch.\n#. Add a few tests to ensure that the bug was fixed or the feature works as expected.\n#. Run the entire test suite and linting checks by running one of the following commands: ``tox && tox -e lint,docs`` (if you have `tox <https://tox.readthedocs.io>`_ installed) **OR** ``make build39 && make test && make lint`` (if you do not have Python 3.9 installed, replace ``build39`` with the latest Python version on your system).\n#. 
Submit a pull request and await feedback 😃.\n\nIf you have any questions along the way, feel free to ask them `here <https://github.com/arrow-py/arrow/discussions>`_.\n\nSupport Arrow\n-------------\n\n`Open Collective <https://opencollective.com/>`_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective <https://opencollective.com/arrow>`_.\n\n | .venv\Lib\site-packages\arrow-1.3.0.dist-info\METADATA | METADATA | Other | 7,534 | 0.95 | 0.085227 | 0.044776 | awesome-app | 751 | 2024-05-26T00:01:51.657409 | BSD-3-Clause | false | efc6d44c11c2ca5e3f9b464d58ba07ee |
arrow-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\narrow-1.3.0.dist-info/LICENSE,sha256=tIH4cpbLCr2xP9jLuUsUwyi-iA7J5oVHphuE2s_9Bno,11341\narrow-1.3.0.dist-info/METADATA,sha256=P7gh6Gt6pIqBLBP577OoTZWFhRmAOpMyiwpuNEeklac,7534\narrow-1.3.0.dist-info/RECORD,,\narrow-1.3.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81\narrow/__init__.py,sha256=HxsSJGl56GoeHB__No-kdGmC_Wes_Ttf0ohOy7OoFig,872\narrow/__pycache__/__init__.cpython-313.pyc,,\narrow/__pycache__/_version.cpython-313.pyc,,\narrow/__pycache__/api.cpython-313.pyc,,\narrow/__pycache__/arrow.cpython-313.pyc,,\narrow/__pycache__/constants.cpython-313.pyc,,\narrow/__pycache__/factory.cpython-313.pyc,,\narrow/__pycache__/formatter.cpython-313.pyc,,\narrow/__pycache__/locales.cpython-313.pyc,,\narrow/__pycache__/parser.cpython-313.pyc,,\narrow/__pycache__/util.cpython-313.pyc,,\narrow/_version.py,sha256=F5mW07pSyGrqDNY2Ehr-UpDzpBtN-FsYU0QGZWf6PJE,22\narrow/api.py,sha256=6tdqrG0NjrKO22_eWHU4a5xerfR6IrZPY-yynGpnvTM,2755\narrow/arrow.py,sha256=m9XvNnpQ1aTHZWXPud3W2-QMfilgWXnUCnuZInwf27g,63517\narrow/constants.py,sha256=y3scgWgxiFuQg4DeFlhmexy1BA7K8LFNZyqK-VWPQJs,3238\narrow/factory.py,sha256=qiDSokfcVWJhiJbIkOcU1Ohh4N0PdKxghsJzBnI8AUo,11432\narrow/formatter.py,sha256=0D0-AjBZwuay9312KvY0UnaVBfAZj-vEIqWcG0_3ZDQ,5267\narrow/locales.py,sha256=6g5xHq5UkIAZPF8N2PvzN_xoUvsfNcPhNfJw0TUi8tw,156894\narrow/parser.py,sha256=FO6NWpzjvZcsMhIck6pd7hKe1ijlKUZE9l_OFlyskyw,25790\narrow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\narrow/util.py,sha256=xnDevqRyNeYWbl3x-n_Tyo4cOgHcdgbxFECFsJ1XoEc,3679\n | .venv\Lib\site-packages\arrow-1.3.0.dist-info\RECORD | RECORD | Other | 1,630 | 0.7 | 0 | 0 | python-kit | 904 | 2024-01-05T12:02:25.916914 | Apache-2.0 | false | 7991226d80693fe313451dc9e4483675 |
Wheel-Version: 1.0\nGenerator: flit 3.9.0\nRoot-Is-Purelib: true\nTag: py3-none-any\n | .venv\Lib\site-packages\arrow-1.3.0.dist-info\WHEEL | WHEEL | Other | 81 | 0.5 | 0 | 0 | react-lib | 571 | 2024-10-24T13:43:26.815748 | Apache-2.0 | false | 24019423ea7c0c2df41c8272a3791e7b |
# Compatibility shim for the optional ``astroid`` dependency.
# Newer astroid releases expose node classes via ``astroid.nodes``; older
# releases used ``astroid.node_classes`` (with ``BaseContainer`` named
# ``_BaseContainer``). If astroid is not installed at all, all three names
# are bound to None so callers can feature-test with a simple ``is None``.
try:
  from astroid import nodes as astroid_node_classes

  # astroid_node_classes should be whichever module has the NodeNG class
  from astroid.nodes import NodeNG
  from astroid.nodes import BaseContainer
except Exception:
  # Fall back to the pre-rename module layout.
  try:
    from astroid import node_classes as astroid_node_classes
    from astroid.node_classes import NodeNG
    from astroid.node_classes import _BaseContainer as BaseContainer
  except Exception:  # pragma: no cover
    # astroid unavailable (or incompatible): disable astroid support entirely.
    astroid_node_classes = None
    NodeNG = None
    BaseContainer = None


__all__ = ["astroid_node_classes", "NodeNG", "BaseContainer"]
# Copyright 2016 Grist Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Core of the asttokens package: maps AST (or astroid) nodes back to positions
and text in the original source.

Two implementations share the ``ASTTextBase`` interface:

* ``ASTTokens`` tokenizes the source and marks every node with
  ``.first_token`` / ``.last_token`` attributes.
* ``ASTText`` derives positions directly from node ``lineno`` /
  ``col_offset`` info, falling back to an ``ASTTokens`` instance for node
  types it cannot handle (see ``supports_tokenless``).
"""

import abc
import ast
import bisect
import sys
import token
from ast import Module
from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast, TYPE_CHECKING

from .line_numbers import LineNumbers
from .util import (
  Token, match_token, is_non_coding_token, patched_generate_tokens, last_stmt,
  annotate_fstring_nodes, generate_tokens, is_module, is_stmt
)

if TYPE_CHECKING:  # pragma: no cover
  from .util import AstNode, TokenInfo


class ASTTextBase(metaclass=abc.ABCMeta):
  """
  Abstract base shared by ``ASTTokens`` and ``ASTText``: holds the source
  text and a ``LineNumbers`` helper, and implements the ``get_text*``
  methods in terms of the abstract ``get_text_positions()``.
  """

  def __init__(self, source_text: str, filename: str) -> None:
    self._filename = filename

    # Decode source after parsing to let Python 2 handle coding declarations.
    # (If the encoding was not utf-8 compatible, then even if it parses correctly,
    # we'll fail with a unicode error here.)
    source_text = str(source_text)

    self._text = source_text
    self._line_numbers = LineNumbers(source_text)

  @abc.abstractmethod
  def get_text_positions(self, node, padded):
    # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
    """
    Returns two ``(lineno, col_offset)`` tuples for the start and end of the given node.
    If the positions can't be determined, or the nodes don't correspond to any particular text,
    returns ``(1, 0)`` for both.

    ``padded`` corresponds to the ``padded`` argument to ``ast.get_source_segment()``.
    This means that if ``padded`` is True, the start position will be adjusted to include
    leading whitespace if ``node`` is a multiline statement.
    """
    raise NotImplementedError  # pragma: no cover

  def get_text_range(self, node, padded=True):
    # type: (AstNode, bool) -> Tuple[int, int]
    """
    Returns the (startpos, endpos) positions in source text corresponding to the given node.
    Returns (0, 0) for nodes (like `Load`) that don't correspond to any particular text.

    See ``get_text_positions()`` for details on the ``padded`` argument.
    """
    start, end = self.get_text_positions(node, padded)
    return (
      self._line_numbers.line_to_offset(*start),
      self._line_numbers.line_to_offset(*end),
    )

  def get_text(self, node, padded=True):
    # type: (AstNode, bool) -> str
    """
    Returns the text corresponding to the given node.
    Returns '' for nodes (like `Load`) that don't correspond to any particular text.

    See ``get_text_positions()`` for details on the ``padded`` argument.
    """
    start, end = self.get_text_range(node, padded)
    return self._text[start: end]


class ASTTokens(ASTTextBase):
  """
  ASTTokens maintains the text of Python code in several forms: as a string, as line numbers, and
  as tokens, and is used to mark and access token and position information.

  ``source_text`` must be a unicode or UTF8-encoded string. If you pass in UTF8 bytes, remember
  that all offsets you'll get are to the unicode text, which is available as the ``.text``
  property.

  If ``parse`` is set, the ``source_text`` will be parsed with ``ast.parse()``, and the resulting
  tree marked with token info and made available as the ``.tree`` property.

  If ``tree`` is given, it will be marked and made available as the ``.tree`` property. In
  addition to the trees produced by the ``ast`` module, ASTTokens will also mark trees produced
  using ``astroid`` library <https://www.astroid.org>.

  If only ``source_text`` is given, you may use ``.mark_tokens(tree)`` to mark the nodes of an AST
  tree created separately.
  """

  def __init__(self, source_text, parse=False, tree=None, filename='<unknown>', tokens=None):
    # type: (Any, bool, Optional[Module], str, Iterable[TokenInfo]) -> None
    super(ASTTokens, self).__init__(source_text, filename)

    self._tree = ast.parse(source_text, filename) if parse else tree

    # Tokenize the code.
    if tokens is None:
      tokens = generate_tokens(self._text)
    self._tokens = list(self._translate_tokens(tokens))

    # Extract the start positions of all tokens, so that we can quickly map positions to tokens.
    self._token_offsets = [tok.startpos for tok in self._tokens]

    if self._tree:
      self.mark_tokens(self._tree)

  def mark_tokens(self, root_node):
    # type: (Module) -> None
    """
    Given the root of the AST or Astroid tree produced from source_text, visits all nodes marking
    them with token and position information by adding ``.first_token`` and
    ``.last_token`` attributes. This is done automatically in the constructor when ``parse`` or
    ``tree`` arguments are set, but may be used manually with a separate AST or Astroid tree.
    """
    # The hard work of this class is done by MarkTokens
    from .mark_tokens import MarkTokens  # to avoid import loops
    MarkTokens(self).visit_tree(root_node)

  def _translate_tokens(self, original_tokens):
    # type: (Iterable[TokenInfo]) -> Iterator[Token]
    """
    Translates the given standard library tokens into our own representation.
    """
    # Each Token records its index in the stream plus absolute character
    # offsets, so later lookups don't need to re-derive them.
    for index, tok in enumerate(patched_generate_tokens(original_tokens)):
      tok_type, tok_str, start, end, line = tok
      yield Token(tok_type, tok_str, start, end, line, index,
                  self._line_numbers.line_to_offset(start[0], start[1]),
                  self._line_numbers.line_to_offset(end[0], end[1]))

  @property
  def text(self):
    # type: () -> str
    """The source code passed into the constructor."""
    return self._text

  @property
  def tokens(self):
    # type: () -> List[Token]
    """The list of tokens corresponding to the source code from the constructor."""
    return self._tokens

  @property
  def tree(self):
    # type: () -> Optional[Module]
    """The root of the AST tree passed into the constructor or parsed from the source code."""
    return self._tree

  @property
  def filename(self):
    # type: () -> str
    """The filename that was parsed."""
    return self._filename

  def get_token_from_offset(self, offset):
    # type: (int) -> Token
    """
    Returns the token containing the given character offset (0-based position in source text),
    or the preceding token if the position is between tokens.
    """
    # bisect over the precomputed start offsets finds the rightmost token
    # starting at or before `offset`.
    return self._tokens[bisect.bisect(self._token_offsets, offset) - 1]

  def get_token(self, lineno, col_offset):
    # type: (int, int) -> Token
    """
    Returns the token containing the given (lineno, col_offset) position, or the preceding token
    if the position is between tokens.
    """
    # TODO: add test for multibyte unicode. We need to translate offsets from ast module (which
    # are in utf8) to offsets into the unicode text. tokenize module seems to use unicode offsets
    # but isn't explicit.
    return self.get_token_from_offset(self._line_numbers.line_to_offset(lineno, col_offset))

  def get_token_from_utf8(self, lineno, col_offset):
    # type: (int, int) -> Token
    """
    Same as get_token(), but interprets col_offset as a UTF8 offset, which is what `ast` uses.
    """
    return self.get_token(lineno, self._line_numbers.from_utf8_col(lineno, col_offset))

  def next_token(self, tok, include_extra=False):
    # type: (Token, bool) -> Token
    """
    Returns the next token after the given one. If include_extra is True, includes non-coding
    tokens from the tokenize module, such as NL and COMMENT.
    """
    i = tok.index + 1
    if not include_extra:
      while is_non_coding_token(self._tokens[i].type):
        i += 1
    return self._tokens[i]

  def prev_token(self, tok, include_extra=False):
    # type: (Token, bool) -> Token
    """
    Returns the previous token before the given one. If include_extra is True, includes non-coding
    tokens from the tokenize module, such as NL and COMMENT.
    """
    i = tok.index - 1
    if not include_extra:
      while is_non_coding_token(self._tokens[i].type):
        i -= 1
    return self._tokens[i]

  def find_token(self, start_token, tok_type, tok_str=None, reverse=False):
    # type: (Token, int, Optional[str], bool) -> Token
    """
    Looks for the first token, starting at start_token, that matches tok_type and, if given, the
    token string. Searches backwards if reverse is True. Returns ENDMARKER token if not found (you
    can check it with `token.ISEOF(t.type)`).
    """
    t = start_token
    advance = self.prev_token if reverse else self.next_token
    while not match_token(t, tok_type, tok_str) and not token.ISEOF(t.type):
      t = advance(t, include_extra=True)
    return t

  def token_range(self,
                  first_token,  # type: Token
                  last_token,  # type: Token
                  include_extra=False,  # type: bool
                  ):
    # type: (...) -> Iterator[Token]
    """
    Yields all tokens in order from first_token through and including last_token. If
    include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT.
    """
    for i in range(first_token.index, last_token.index + 1):
      if include_extra or not is_non_coding_token(self._tokens[i].type):
        yield self._tokens[i]

  def get_tokens(self, node, include_extra=False):
    # type: (AstNode, bool) -> Iterator[Token]
    """
    Yields all tokens making up the given node. If include_extra is True, includes non-coding
    tokens such as tokenize.NL and .COMMENT.
    """
    return self.token_range(node.first_token, node.last_token, include_extra=include_extra)

  def get_text_positions(self, node, padded):
    # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
    """
    Returns two ``(lineno, col_offset)`` tuples for the start and end of the given node.
    If the positions can't be determined, or the nodes don't correspond to any particular text,
    returns ``(1, 0)`` for both.

    ``padded`` corresponds to the ``padded`` argument to ``ast.get_source_segment()``.
    This means that if ``padded`` is True, the start position will be adjusted to include
    leading whitespace if ``node`` is a multiline statement.
    """
    # Nodes that were never marked by mark_tokens() carry no position info.
    if not hasattr(node, 'first_token'):
      return (1, 0), (1, 0)

    start = node.first_token.start
    end = node.last_token.end
    if padded and any(match_token(t, token.NEWLINE) for t in self.get_tokens(node)):
      # Set col_offset to 0 to include leading indentation for multiline statements.
      start = (start[0], 0)

    return start, end


class ASTText(ASTTextBase):
  """
  Supports the same ``get_text*`` methods as ``ASTTokens``,
  but uses the AST to determine the text positions instead of tokens.
  This is faster than ``ASTTokens`` as it requires less setup work.

  It also (sometimes) supports nodes inside f-strings, which ``ASTTokens`` doesn't.

  Some node types and/or Python versions are not supported.
  In these cases the ``get_text*`` methods will fall back to using ``ASTTokens``
  which incurs the usual setup cost the first time.
  If you want to avoid this, check ``supports_tokenless(node)`` before calling ``get_text*`` methods.
  """
  def __init__(self, source_text, tree=None, filename='<unknown>'):
    # type: (Any, Optional[Module], str) -> None
    super(ASTText, self).__init__(source_text, filename)

    self._tree = tree
    if self._tree is not None:
      annotate_fstring_nodes(self._tree)

    # Lazily-built ASTTokens fallback; see the `asttokens` property.
    self._asttokens = None  # type: Optional[ASTTokens]

  @property
  def tree(self):
    # type: () -> Module
    # Parsed on first access when no tree was supplied to the constructor.
    if self._tree is None:
      self._tree = ast.parse(self._text, self._filename)
      annotate_fstring_nodes(self._tree)
    return self._tree

  @property
  def asttokens(self):
    # type: () -> ASTTokens
    # Built on first access only, since tokenizing is the expensive part
    # that ASTText exists to avoid.
    if self._asttokens is None:
      self._asttokens = ASTTokens(
        self._text,
        tree=self.tree,
        filename=self._filename,
      )
    return self._asttokens

  def _get_text_positions_tokenless(self, node, padded):
    # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
    """
    Version of ``get_text_positions()`` that doesn't use tokens.
    """
    if is_module(node):
      # Modules don't have position info, so just return the range of the whole text.
      # The token-using method does something different, but its behavior seems weird and inconsistent.
      # For example, in a file with only comments, it only returns the first line.
      # It's hard to imagine a case when this matters.
      return (1, 0), self._line_numbers.offset_to_line(len(self._text))

    if getattr(node, 'lineno', None) is None:
      return (1, 0), (1, 0)

    assert node  # tell mypy that node is not None, which we allowed up to here for compatibility

    decorators = getattr(node, 'decorator_list', [])
    if not decorators:
      # Astroid uses node.decorators.nodes instead of node.decorator_list.
      decorators_node = getattr(node, 'decorators', None)
      decorators = getattr(decorators_node, 'nodes', [])
    if decorators:
      # Function/Class definition nodes are marked by AST as starting at def/class,
      # not the first decorator. This doesn't match the token-using behavior,
      # or inspect.getsource(), and just seems weird.
      start_node = decorators[0]
    else:
      start_node = node

    start_lineno = start_node.lineno
    end_node = last_stmt(node)

    # Include leading indentation for multiline statements.
    # This doesn't mean simple statements that happen to be on multiple lines,
    # but compound statements where inner indentation matters.
    # So we don't just compare node.lineno and node.end_lineno,
    # we check for a contained statement starting on a different line.
    if padded and (
        start_lineno != end_node.lineno
        or (
            # Astroid docstrings aren't treated as separate statements.
            # So to handle function/class definitions with a docstring but no other body,
            # we just check that the node is a statement with a docstring
            # and spanning multiple lines in the simple, literal sense.
            start_lineno != node.end_lineno
            and getattr(node, "doc_node", None)
            and is_stmt(node)
        )
    ):
      start_col_offset = 0
    else:
      start_col_offset = self._line_numbers.from_utf8_col(start_lineno, start_node.col_offset)

    start = (start_lineno, start_col_offset)

    # To match the token-using behaviour, we exclude trailing semicolons and comments.
    # This means that for blocks containing multiple statements, we have to use the last one
    # instead of the actual node for end_lineno and end_col_offset.
    end_lineno = cast(int, end_node.end_lineno)
    end_col_offset = cast(int, end_node.end_col_offset)
    end_col_offset = self._line_numbers.from_utf8_col(end_lineno, end_col_offset)
    end = (end_lineno, end_col_offset)

    return start, end

  def get_text_positions(self, node, padded):
    # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
    """
    Returns two ``(lineno, col_offset)`` tuples for the start and end of the given node.
    If the positions can't be determined, or the nodes don't correspond to any particular text,
    returns ``(1, 0)`` for both.

    ``padded`` corresponds to the ``padded`` argument to ``ast.get_source_segment()``.
    This means that if ``padded`` is True, the start position will be adjusted to include
    leading whitespace if ``node`` is a multiline statement.
    """
    if getattr(node, "_broken_positions", None):
      # This node was marked in util.annotate_fstring_nodes as having untrustworthy lineno/col_offset.
      return (1, 0), (1, 0)

    if supports_tokenless(node):
      return self._get_text_positions_tokenless(node, padded)

    # Fall back to the token-based implementation (built lazily).
    return self.asttokens.get_text_positions(node, padded)


# Node types that _get_text_positions_tokenless doesn't support.
# These initial values are missing lineno.
_unsupported_tokenless_types = ("arguments", "Arguments", "withitem")  # type: Tuple[str, ...]
if sys.version_info[:2] == (3, 8):
  # _get_text_positions_tokenless works incorrectly for these types due to bugs in Python 3.8.
  _unsupported_tokenless_types += ("arg", "Starred")
  # no lineno in 3.8
  _unsupported_tokenless_types += ("Slice", "ExtSlice", "Index", "keyword")


def supports_tokenless(node=None):
  # type: (Any) -> bool
  """
  Returns True if the Python version and the node (if given) are supported by
  the ``get_text*`` methods of ``ASTText`` without falling back to ``ASTTokens``.
  See ``ASTText`` for why this matters.

  The following cases are not supported:

  - PyPy
  - ``ast.arguments`` / ``astroid.Arguments``
  - ``ast.withitem``
  - ``astroid.Comprehension``
  - ``astroid.AssignName`` inside ``astroid.Arguments`` or ``astroid.ExceptHandler``
  - The following nodes in Python 3.8 only:
    - ``ast.arg``
    - ``ast.Starred``
    - ``ast.Slice``
    - ``ast.ExtSlice``
    - ``ast.Index``
    - ``ast.keyword``
  """
  return (
    type(node).__name__ not in _unsupported_tokenless_types
    and not (
      # astroid nodes
      not isinstance(node, ast.AST) and node is not None and (
        (
          type(node).__name__ == "AssignName"
          and type(node.parent).__name__ in ("Arguments", "ExceptHandler")
        )
      )
    )
    and 'pypy' not in sys.version.lower()
  )
# Copyright 2016 Grist Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import bisect
import re
from typing import Dict, List, Tuple

_line_start_re = re.compile(r'^', re.M)

class LineNumbers:
  """
  Converts between character offsets into a text string and pairs (line, column) of 1-based
  line and 0-based column numbers, as used by tokens and AST nodes.

  The input text is unicode and positions are stored in unicode, but the class also supports
  translating to and from utf8 byte offsets, which are what ast parsing uses.
  """
  def __init__(self, text):
    # type: (str) -> None
    # Character offset of the first character of every line; the `^` pattern in MULTILINE
    # mode matches exactly once at the start of each line.
    self._line_offsets = [match.start(0) for match in _line_start_re.finditer(text)]
    self._text = text
    self._text_len = len(text)
    # Maps a 1-based line number to a list that gives, for every utf8 byte of that line,
    # the index of the character containing that byte.
    self._utf8_offset_cache = {}  # type: Dict[int, List[int]]

  def from_utf8_col(self, line, utf8_column):
    # type: (int, int) -> int
    """
    Given a 1-based line number and 0-based utf8 column, returns a 0-based unicode column.
    """
    offsets = self._utf8_offset_cache.get(line)
    if offsets is None:
      # End of this line is the start of the next line, or the end of the text for the last line.
      if line < len(self._line_offsets):
        line_end = self._line_offsets[line]
      else:
        line_end = self._text_len
      line_text = self._text[self._line_offsets[line - 1]:line_end]

      # Build one entry per utf8 byte, each naming the character the byte belongs to,
      # plus one trailing entry for the position just past the end of the line.
      offsets = []
      for char_index, char in enumerate(line_text):
        offsets.extend([char_index] * len(char.encode('utf8')))
      offsets.append(len(line_text))
      self._utf8_offset_cache[line] = offsets

    # Clamp the requested byte column into the valid range before looking it up.
    return offsets[min(max(utf8_column, 0), len(offsets) - 1)]

  def line_to_offset(self, line, column):
    # type: (int, int) -> int
    """
    Converts 1-based line number and 0-based column to 0-based character offset into text.
    """
    line -= 1
    if line < 0:
      return 0
    if line >= len(self._line_offsets):
      return self._text_len
    return min(self._line_offsets[line] + max(0, column), self._text_len)

  def offset_to_line(self, offset):
    # type: (int) -> Tuple[int, int]
    """
    Converts 0-based character offset to pair (line, col) of 1-based line and 0-based column
    numbers.
    """
    offset = min(self._text_len, max(0, offset))
    line_index = bisect.bisect_right(self._line_offsets, offset) - 1
    return (line_index + 1, offset - self._line_offsets[line_index])
# Copyright 2016 Grist Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import numbers
import sys
import token
from ast import Module
from typing import Callable, List, Union, cast, Optional, Tuple, TYPE_CHECKING

from . import util
from .asttokens import ASTTokens
from .astroid_compat import astroid_node_classes as nc, BaseContainer as AstroidBaseContainer

if TYPE_CHECKING:
  from .util import AstNode


# Mapping of matching braces. To find a token here, look up token[:2].
_matching_pairs_left = {
  (token.OP, '('): (token.OP, ')'),
  (token.OP, '['): (token.OP, ']'),
  (token.OP, '{'): (token.OP, '}'),
}

_matching_pairs_right = {
  (token.OP, ')'): (token.OP, '('),
  (token.OP, ']'): (token.OP, '['),
  (token.OP, '}'): (token.OP, '{'),
}


class MarkTokens:
  """
  Helper that visits all nodes in the AST tree and assigns .first_token and .last_token attributes
  to each of them. This is the heart of the token-marking logic.

  Nodes are processed generically first (children's token ranges are merged, statements extended
  to the NEWLINE, unmatched brackets captured), and then a node-type-specific ``visit_*`` method,
  looked up via ``util.NodeMethods``, gets a chance to adjust the preliminary range.
  """
  def __init__(self, code):
    # type: (ASTTokens) -> None
    self._code = code
    # Cache of visit_* method lookups, keyed by node class.
    self._methods = util.NodeMethods()
    # Set by visit_tree(); yields direct children of a node for either ast or astroid trees.
    self._iter_children = None # type: Optional[Callable]

  def visit_tree(self, node):
    # type: (Module) -> None
    # Entry point: walk the whole tree, assigning first_token/last_token to every node.
    self._iter_children = util.iter_children_func(node)
    util.visit_tree(node, self._visit_before_children, self._visit_after_children)

  def _visit_before_children(self, node, parent_token):
    # type: (AstNode, Optional[util.Token]) -> Tuple[Optional[util.Token], Optional[util.Token]]
    # Find the token at the node's own (lineno, col_offset) position, if it has one.
    col = getattr(node, 'col_offset', None)
    token = self._code.get_token_from_utf8(node.lineno, col) if col is not None else None

    if not token and util.is_module(node):
      # We'll assume that a Module node starts at the start of the source code.
      token = self._code.get_token(1, 0)

    # Use our own token, or our parent's if we don't have one, to pass to child calls as
    # parent_token argument. The second value becomes the token argument of _visit_after_children.
    return (token or parent_token, token)

  def _visit_after_children(self, node, parent_token, token):
    # type: (AstNode, Optional[util.Token], Optional[util.Token]) -> None
    # This processes the node generically first, after all children have been processed.

    # Get the first and last tokens that belong to children. Note how this doesn't assume that we
    # iterate through children in order that corresponds to occurrence in source code. This
    # assumption can fail (e.g. with return annotations).
    first = token
    last = None
    for child in cast(Callable, self._iter_children)(node):
      # astroid slices have especially wrong positions, we don't want them to corrupt their parents.
      if util.is_empty_astroid_slice(child):
        continue
      if not first or child.first_token.index < first.index:
        first = child.first_token
      if not last or child.last_token.index > last.index:
        last = child.last_token

    # If we don't have a first token from _visit_before_children, and there were no children, then
    # use the parent's token as the first token.
    first = first or parent_token

    # If no children, set last token to the first one.
    last = last or first

    # Statements continue to before NEWLINE. This helps cover a few different cases at once.
    if util.is_stmt(node):
      last = self._find_last_in_stmt(cast(util.Token, last))

    # Capture any unmatched brackets.
    first, last = self._expand_to_matching_pairs(cast(util.Token, first), cast(util.Token, last), node)

    # Give a chance to node-specific methods to adjust.
    nfirst, nlast = self._methods.get(self, node.__class__)(node, first, last)

    if (nfirst, nlast) != (first, last):
      # If anything changed, expand again to capture any unmatched brackets.
      nfirst, nlast = self._expand_to_matching_pairs(nfirst, nlast, node)

    node.first_token = nfirst
    node.last_token = nlast

  def _find_last_in_stmt(self, start_token):
    # type: (util.Token) -> util.Token
    # Scan forward to the token just before the statement terminator (NEWLINE, ';', or EOF).
    t = start_token
    while (not util.match_token(t, token.NEWLINE) and
           not util.match_token(t, token.OP, ';') and
           not token.ISEOF(t.type)):
      t = self._code.next_token(t, include_extra=True)
    return self._code.prev_token(t)

  def _expand_to_matching_pairs(self, first_token, last_token, node):
    # type: (util.Token, util.Token, AstNode) -> Tuple[util.Token, util.Token]
    """
    Scan tokens in [first_token, last_token] range that are between node's children, and for any
    unmatched brackets, adjust first/last tokens to include the closing pair.
    """
    # We look for opening parens/braces among non-child tokens (i.e. tokens between our actual
    # child nodes). If we find any closing ones, we match them to the opens.
    to_match_right = [] # type: List[Tuple[int, str]]
    to_match_left = []
    for tok in self._code.token_range(first_token, last_token):
      tok_info = tok[:2]
      if to_match_right and tok_info == to_match_right[-1]:
        to_match_right.pop()
      elif tok_info in _matching_pairs_left:
        to_match_right.append(_matching_pairs_left[tok_info])
      elif tok_info in _matching_pairs_right:
        to_match_left.append(_matching_pairs_right[tok_info])

    # Once done, extend `last_token` to match any unclosed parens/braces.
    for match in reversed(to_match_right):
      last = self._code.next_token(last_token)
      # Allow for trailing commas or colons (allowed in subscripts) before the closing delimiter
      while any(util.match_token(last, token.OP, x) for x in (',', ':')):
        last = self._code.next_token(last)
      # Now check for the actual closing delimiter.
      if util.match_token(last, *match):
        last_token = last

    # And extend `first_token` to match any unclosed opening parens/braces.
    for match in to_match_left:
      first = self._code.prev_token(first_token)
      if util.match_token(first, *match):
        first_token = first

    return (first_token, last_token)

  #----------------------------------------------------------------------
  # Node visitors. Each takes a preliminary first and last tokens, and returns the adjusted pair
  # that will actually be assigned.

  def visit_default(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # pylint: disable=no-self-use
    # By default, we don't need to adjust the token we computed earlier.
    return (first_token, last_token)

  def handle_comp(self, open_brace, node, first_token, last_token):
    # type: (str, AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # For list/set/dict comprehensions, we only get the token of the first child, so adjust it to
    # include the opening brace (the closing brace will be matched automatically).
    before = self._code.prev_token(first_token)
    util.expect_token(before, token.OP, open_brace)
    return (before, last_token)

  def visit_comprehension(self,
                          node, # type: AstNode
                          first_token, # type: util.Token
                          last_token, # type: util.Token
                          ):
    # type: (...) -> Tuple[util.Token, util.Token]
    # The 'comprehension' node starts with 'for' but we only get first child; we search backwards
    # to find the 'for' keyword.
    first = self._code.find_token(first_token, token.NAME, 'for', reverse=True)
    return (first, last_token)

  def visit_if(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Walk backwards to include the 'if' or 'elif' keyword that starts the statement.
    while first_token.string not in ('if', 'elif'):
      first_token = self._code.prev_token(first_token)
    return first_token, last_token

  def handle_attr(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Attribute node has ".attr" (2 tokens) after the last child.
    dot = self._code.find_token(last_token, token.OP, '.')
    name = self._code.next_token(dot)
    util.expect_token(name, token.NAME)
    return (first_token, name)

  visit_attribute = handle_attr
  visit_assignattr = handle_attr
  visit_delattr = handle_attr

  def handle_def(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # With astroid, nodes that start with a doc-string can have an empty body, in which case we
    # need to adjust the last token to include the doc string.
    if not node.body and (getattr(node, 'doc_node', None) or getattr(node, 'doc', None)): # type: ignore[union-attr]
      last_token = self._code.find_token(last_token, token.STRING)

    # Include @ from decorator
    if first_token.index > 0:
      prev = self._code.prev_token(first_token)
      if util.match_token(prev, token.OP, '@'):
        first_token = prev
    return (first_token, last_token)

  visit_classdef = handle_def
  visit_functiondef = handle_def

  def handle_following_brackets(self, node, last_token, opening_bracket):
    # type: (AstNode, util.Token, str) -> util.Token
    # This is for calls and subscripts, which have a pair of brackets
    # at the end which may contain no nodes, e.g. foo() or bar[:].
    # We look for the opening bracket and then let the matching pair be found automatically
    # Remember that last_token is at the end of all children,
    # so we are not worried about encountering a bracket that belongs to a child.
    first_child = next(cast(Callable, self._iter_children)(node))
    call_start = self._code.find_token(first_child.last_token, token.OP, opening_bracket)
    if call_start.index > last_token.index:
      last_token = call_start
    return last_token

  def visit_call(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    last_token = self.handle_following_brackets(node, last_token, '(')

    # Handling a python bug with decorators with empty parens, e.g.
    # @deco()
    # def ...
    if util.match_token(first_token, token.OP, '@'):
      first_token = self._code.next_token(first_token)
    return (first_token, last_token)

  def visit_matchclass(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # A match-case class pattern, e.g. `case Point(x=0):`, ends with a call-like paren pair.
    last_token = self.handle_following_brackets(node, last_token, '(')
    return (first_token, last_token)

  def visit_subscript(self,
                      node, # type: AstNode
                      first_token, # type: util.Token
                      last_token, # type: util.Token
                      ):
    # type: (...) -> Tuple[util.Token, util.Token]
    last_token = self.handle_following_brackets(node, last_token, '[')
    return (first_token, last_token)

  def visit_slice(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # consume `:` tokens to the left and right. In Python 3.9, Slice nodes are
    # given a col_offset, (and end_col_offset), so this will always start inside
    # the slice, even if it is the empty slice. However, in 3.8 and below, this
    # will only expand to the full slice if the slice contains a node with a
    # col_offset. So x[:] will only get the correct tokens in 3.9, but x[1:] and
    # x[:1] will even on earlier versions of Python.
    while True:
      prev = self._code.prev_token(first_token)
      if prev.string != ':':
        break
      first_token = prev
    while True:
      next_ = self._code.next_token(last_token)
      if next_.string != ':':
        break
      last_token = next_
    return (first_token, last_token)

  def handle_bare_tuple(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # A bare tuple doesn't include parens; if there is a trailing comma, make it part of the tuple.
    maybe_comma = self._code.next_token(last_token)
    if util.match_token(maybe_comma, token.OP, ','):
      last_token = maybe_comma
    return (first_token, last_token)

  # In Python3.8 parsed tuples include parentheses when present.
  def handle_tuple_nonempty(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    assert isinstance(node, ast.Tuple) or isinstance(node, AstroidBaseContainer)
    # It's a bare tuple if the first token belongs to the first child. The first child may
    # include extraneous parentheses (which don't create new nodes), so account for those too.
    child = node.elts[0]
    if TYPE_CHECKING:
      child = cast(AstNode, child)
    child_first, child_last = self._gobble_parens(child.first_token, child.last_token, True)
    if first_token == child_first:
      return self.handle_bare_tuple(node, first_token, last_token)
    return (first_token, last_token)

  def visit_tuple(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    assert isinstance(node, ast.Tuple) or isinstance(node, AstroidBaseContainer)
    if not node.elts:
      # An empty tuple is just "()", and we need no further info.
      return (first_token, last_token)
    return self.handle_tuple_nonempty(node, first_token, last_token)

  def _gobble_parens(self, first_token, last_token, include_all=False):
    # type: (util.Token, util.Token, bool) -> Tuple[util.Token, util.Token]
    # Expands a range of tokens to include one or all pairs of surrounding parentheses, and
    # returns (first, last) tokens that include these parens.
    while first_token.index > 0:
      prev = self._code.prev_token(first_token)
      next = self._code.next_token(last_token)
      if util.match_token(prev, token.OP, '(') and util.match_token(next, token.OP, ')'):
        first_token, last_token = prev, next
        if include_all:
          continue
      break
    return (first_token, last_token)

  def visit_str(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    return self.handle_str(first_token, last_token)

  def visit_joinedstr(self,
                      node, # type: AstNode
                      first_token, # type: util.Token
                      last_token, # type: util.Token
                      ):
    # type: (...) -> Tuple[util.Token, util.Token]
    if sys.version_info < (3, 12):
      # Older versions don't tokenize the contents of f-strings
      return self.handle_str(first_token, last_token)

    last = first_token
    while True:
      if util.match_token(last, getattr(token, "FSTRING_START")):
        # Python 3.12+ has tokens for the start (e.g. `f"`) and end (`"`)
        # of the f-string. We can't just look for the next FSTRING_END
        # because f-strings can be nested, e.g. f"{f'{x}'}", so we need
        # to treat this like matching balanced parentheses.
        count = 1
        while count > 0:
          last = self._code.next_token(last)
          # mypy complains about token.FSTRING_START and token.FSTRING_END.
          if util.match_token(last, getattr(token, "FSTRING_START")):
            count += 1
          elif util.match_token(last, getattr(token, "FSTRING_END")):
            count -= 1
        last_token = last
        last = self._code.next_token(last_token)
      elif util.match_token(last, token.STRING):
        # Similar to handle_str, we also need to handle adjacent strings.
        last_token = last
        last = self._code.next_token(last_token)
      else:
        break
    return (first_token, last_token)

  def visit_bytes(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    return self.handle_str(first_token, last_token)

  def handle_str(self, first_token, last_token):
    # type: (util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Multiple adjacent STRING tokens form a single string.
    last = self._code.next_token(last_token)
    while util.match_token(last, token.STRING):
      last_token = last
      last = self._code.next_token(last_token)
    return (first_token, last_token)

  def handle_num(self,
                 node, # type: AstNode
                 value, # type: Union[complex, int, numbers.Number]
                 first_token, # type: util.Token
                 last_token, # type: util.Token
                 ):
    # type: (...) -> Tuple[util.Token, util.Token]
    # A constant like '-1' gets turned into two tokens; this will skip the '-'.
    while util.match_token(last_token, token.OP):
      last_token = self._code.next_token(last_token)

    if isinstance(value, complex):
      # A complex number like -2j cannot be compared directly to 0
      # A complex number like 1-2j is expressed as a binary operation
      # so we don't need to worry about it
      value = value.imag

    # This makes sure that the - is included
    if value < 0 and first_token.type == token.NUMBER: # type: ignore[operator]
      first_token = self._code.prev_token(first_token)
    return (first_token, last_token)

  def visit_num(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    return self.handle_num(node, cast(ast.Num, node).n, first_token, last_token)

  def visit_const(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Dispatch to the numeric or string handler depending on the constant's value type.
    assert isinstance(node, ast.Constant) or isinstance(node, nc.Const)
    if isinstance(node.value, numbers.Number):
      return self.handle_num(node, node.value, first_token, last_token)
    elif isinstance(node.value, (str, bytes)):
      return self.visit_str(node, first_token, last_token)
    return (first_token, last_token)

  visit_constant = visit_const

  def visit_keyword(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Until python 3.9 (https://bugs.python.org/issue40141),
    # ast.keyword nodes didn't have line info. Astroid has lineno None.
    assert isinstance(node, ast.keyword) or isinstance(node, nc.Keyword)
    if node.arg is not None and getattr(node, 'lineno', None) is None:
      equals = self._code.find_token(first_token, token.OP, '=', reverse=True)
      name = self._code.prev_token(equals)
      util.expect_token(name, token.NAME, node.arg)
      first_token = name
    return (first_token, last_token)

  def visit_starred(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Astroid has 'Starred' nodes (for "foo(*bar)" type args), but they need to be adjusted.
    if not util.match_token(first_token, token.OP, '*'):
      star = self._code.prev_token(first_token)
      if util.match_token(star, token.OP, '*'):
        first_token = star
    return (first_token, last_token)

  def visit_assignname(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # Astroid may turn 'except' clause into AssignName, but we need to adjust it.
    if util.match_token(first_token, token.NAME, 'except'):
      colon = self._code.find_token(last_token, token.OP, ':')
      first_token = last_token = self._code.prev_token(colon)
    return (first_token, last_token)

  # Async nodes should typically start with the word 'async'
  # but Python < 3.7 doesn't put the col_offset there
  # AsyncFunctionDef is slightly different because it might have
  # decorators before that, which visit_functiondef handles
  def handle_async(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    if not first_token.string == 'async':
      first_token = self._code.prev_token(first_token)
    return (first_token, last_token)

  visit_asyncfor = handle_async
  visit_asyncwith = handle_async

  def visit_asyncfunctiondef(self,
                             node, # type: AstNode
                             first_token, # type: util.Token
                             last_token, # type: util.Token
                             ):
    # type: (...) -> Tuple[util.Token, util.Token]
    if util.match_token(first_token, token.NAME, 'def'):
      # Include the 'async' token
      first_token = self._code.prev_token(first_token)
    return self.visit_functiondef(node, first_token, last_token)
# Copyright 2016 Grist Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import collections
import io
import sys
import token
import tokenize
from abc import ABCMeta
from ast import Module, expr, AST
from functools import lru_cache
from typing import (
  Callable,
  Dict,
  Iterable,
  Iterator,
  List,
  Optional,
  Tuple,
  Union,
  cast,
  Any,
  TYPE_CHECKING,
  Type,
)

if TYPE_CHECKING: # pragma: no cover
  from .astroid_compat import NodeNG

  # Type class used to expand out the definition of AST to include fields added by this library
  # It's not actually used for anything other than type checking though!
  class EnhancedAST(AST):
    # Additional attributes set by mark_tokens
    first_token = None # type: Token
    last_token = None # type: Token
    lineno = 0 # type: int

  AstNode = Union[EnhancedAST, NodeNG]

  TokenInfo = tokenize.TokenInfo


def token_repr(tok_type, string):
  # type: (int, Optional[str]) -> str
  """Returns a human-friendly representation of a token with the given type and string."""
  # repr() prefixes unicode with 'u' on Python2 but not Python3; strip it out for consistency.
  return '%s:%s' % (token.tok_name[tok_type], repr(string).lstrip('u'))


class Token(collections.namedtuple('Token', 'type string start end line index startpos endpos')):
  """
  TokenInfo is an 8-tuple containing the same 5 fields as the tokens produced by the tokenize
  module, and 3 additional ones useful for this module:

  - [0] .type     Token type (see token.py)
  - [1] .string   Token (a string)
  - [2] .start    Starting (row, column) indices of the token (a 2-tuple of ints)
  - [3] .end      Ending (row, column) indices of the token (a 2-tuple of ints)
  - [4] .line     Original line (string)
  - [5] .index    Index of the token in the list of tokens that it belongs to.
  - [6] .startpos Starting character offset into the input text.
  - [7] .endpos   Ending character offset into the input text.
  """
  def __str__(self):
    # type: () -> str
    return token_repr(self.type, self.string)


def match_token(token, tok_type, tok_str=None):
  # type: (Token, int, Optional[str]) -> bool
  """Returns true if token is of the given type and, if a string is given, has that string."""
  return token.type == tok_type and (tok_str is None or token.string == tok_str)


def expect_token(token, tok_type, tok_str=None):
  # type: (Token, int, Optional[str]) -> None
  """
  Verifies that the given token is of the expected type. If tok_str is given, the token string
  is verified too. If the token doesn't match, raises an informative ValueError.
  """
  if not match_token(token, tok_type, tok_str):
    raise ValueError("Expected token %s, got %s on line %s col %s" % (
      token_repr(tok_type, tok_str), str(token),
      token.start[0], token.start[1] + 1))


def is_non_coding_token(token_type):
  # type: (int) -> bool
  """
  These are considered non-coding tokens, as they don't affect the syntax tree.
  """
  return token_type in (token.NL, token.COMMENT, token.ENCODING)


def generate_tokens(text):
  # type: (str) -> Iterator[TokenInfo]
  """
  Generates standard library tokens for the given code.
  """
  # tokenize.generate_tokens is technically an undocumented API for Python3, but allows us to use the same API as for
  # Python2. See http://stackoverflow.com/a/4952291/328565.
  # FIXME: Remove cast once https://github.com/python/typeshed/issues/7003 gets fixed
  return tokenize.generate_tokens(cast(Callable[[], str], io.StringIO(text).readline))


def iter_children_func(node):
  # type: (AST) -> Callable
  """
  Returns a function which yields all direct children of a AST node,
  skipping children that are singleton nodes.
  The function depends on whether ``node`` is from ``ast`` or from the ``astroid`` module.
  """
  # Only astroid nodes have a get_children() method, so its presence distinguishes the two.
  return iter_children_astroid if hasattr(node, 'get_children') else iter_children_ast


def iter_children_astroid(node, include_joined_str=False):
  # type: (NodeNG, bool) -> Union[Iterator, List]
  if not include_joined_str and is_joined_str(node):
    return []

  return node.get_children()


# Node classes (expression contexts, operators) that don't correspond to a source position.
SINGLETONS = {c for n, c in ast.__dict__.items() if isinstance(c, type) and
              issubclass(c, (ast.expr_context, ast.boolop, ast.operator, ast.unaryop, ast.cmpop))}


def iter_children_ast(node, include_joined_str=False):
  # type: (AST, bool) -> Iterator[Union[AST, expr]]
  if not include_joined_str and is_joined_str(node):
    return

  if isinstance(node, ast.Dict):
    # override the iteration order: instead of <all keys>, <all values>,
    # yield keys and values in source order (key1, value1, key2, value2, ...)
    for (key, value) in zip(node.keys, node.values):
      if key is not None:
        yield key
      yield value
    return

  for child in ast.iter_child_nodes(node):
    # Skip singleton children; they don't reflect particular positions in the code and break the
    # assumptions about the tree consisting of distinct nodes. Note that collecting classes
    # beforehand and checking them in a set is faster than using isinstance each time.
    if child.__class__ not in SINGLETONS:
      yield child


stmt_class_names = {n for n, c in ast.__dict__.items()
                    if isinstance(c, type) and issubclass(c, ast.stmt)}
expr_class_names = ({n for n, c in ast.__dict__.items()
                     if isinstance(c, type) and issubclass(c, ast.expr)} |
                    {'AssignName', 'DelName', 'Const', 'AssignAttr', 'DelAttr'})

# These feel hacky compared to isinstance() but allow us to work with both ast and astroid nodes
# in the same way, and without even importing astroid.
def is_expr(node):
  # type: (AstNode) -> bool
  """Returns whether node is an expression node."""
  return node.__class__.__name__ in expr_class_names

def is_stmt(node):
  # type: (AstNode) -> bool
  """Returns whether node is a statement node."""
  return node.__class__.__name__ in stmt_class_names

def is_module(node):
  # type: (AstNode) -> bool
  """Returns whether node is a module node."""
  return node.__class__.__name__ == 'Module'

def is_joined_str(node):
  # type: (AstNode) -> bool
  """Returns whether node is a JoinedStr node, used to represent f-strings."""
  # At the moment, nodes below JoinedStr have wrong line/col info, and trying to process them only
  # leads to errors.
  return node.__class__.__name__ == 'JoinedStr'


def is_expr_stmt(node):
  # type: (AstNode) -> bool
  """Returns whether node is an `Expr` node, which is a statement that is an expression."""
  return node.__class__.__name__ == 'Expr'



CONSTANT_CLASSES: Tuple[Type, ...] = (ast.Constant,)
try:
  from astroid import Const
  CONSTANT_CLASSES += (Const,)
except ImportError: # pragma: no cover
  # astroid is not available
  pass

def is_constant(node):
  # type: (AstNode) -> bool
  """Returns whether node is a Constant node."""
  return isinstance(node, CONSTANT_CLASSES)


def is_ellipsis(node):
  # type: (AstNode) -> bool
  """Returns whether node is an Ellipsis node."""
  return is_constant(node) and node.value is Ellipsis # type: ignore


def is_starred(node):
  # type: (AstNode) -> bool
  """Returns whether node is a starred expression node."""
  return node.__class__.__name__ == 'Starred'


def is_slice(node):
  # type: (AstNode) -> bool
  """Returns whether node represents a slice, e.g. `1:2` in `x[1:2]`"""
  # Before 3.9, a tuple containing a slice is an ExtSlice,
  # but this was removed in https://bugs.python.org/issue34822
  return (
    node.__class__.__name__ in ('Slice', 'ExtSlice')
    or (
      node.__class__.__name__ == 'Tuple'
      and any(map(is_slice, cast(ast.Tuple, node).elts))
    )
  )


def is_empty_astroid_slice(node):
  # type: (AstNode) -> bool
  # An astroid Slice (not an ast one) with no lower, upper, or step, e.g. the `:` in `x[:]`.
  return (
    node.__class__.__name__ == "Slice"
    and not isinstance(node, ast.AST)
    and node.lower is node.upper is node.step is None
  )


# Sentinel value used by visit_tree().
_PREVISIT = object()

def visit_tree(node, previsit, postvisit):
  # type: (Module, Callable[[AstNode, Optional[Token]], Tuple[Optional[Token], Optional[Token]]], Optional[Callable[[AstNode, Optional[Token], Optional[Token]], None]]) -> None
  """
  Scans the tree under the node depth-first using an explicit stack. It avoids implicit recursion
  via the function call stack to avoid hitting 'maximum recursion depth exceeded' error.

  It calls ``previsit()`` and ``postvisit()`` as follows:

  * ``previsit(node, par_value)`` - should return ``(par_value, value)``
    ``par_value`` is as returned from ``previsit()`` of the parent.

  * ``postvisit(node, par_value, value)`` - should return ``value``
    ``par_value`` is as returned from ``previsit()`` of the parent, and ``value`` is as
    returned from ``previsit()`` of this node itself. The return ``value`` is ignored except
    the one for the root node, which is returned from the overall ``visit_tree()`` call.

  For the initial node, ``par_value`` is None. ``postvisit`` may be None.
  """
  if not postvisit:
    postvisit = lambda node, pvalue, value: None

  iter_children = iter_children_func(node)
  done = set()
  ret = None
  stack = [(node, None, _PREVISIT)] # type: List[Tuple[AstNode, Optional[Token], Union[Optional[Token], object]]]
  while stack:
    current, par_value, value = stack.pop()
    if value is _PREVISIT:
      assert current not in done # protect against infinite loop in case of a bad tree.
      done.add(current)

      pvalue, post_value = previsit(current, par_value)
      stack.append((current, par_value, post_value))

      # Insert all children in reverse order (so that first child ends up on top of the stack).
      ins = len(stack)
      for n in iter_children(current):
        stack.insert(ins, (n, pvalue, _PREVISIT))
    else:
      ret = postvisit(current, par_value, cast(Optional[Token], value))
  return ret


def walk(node, include_joined_str=False):
  # type: (AST, bool) -> Iterator[Union[Module, AstNode]]
  """
  Recursively yield all descendant nodes in the tree starting at ``node`` (including ``node``
  itself), using depth-first pre-order traversal (yielding parents before their children).

  This is similar to ``ast.walk()``, but with a different order, and it works for both ``ast`` and
  ``astroid`` trees. Also, as ``iter_children()``, it skips singleton nodes generated by ``ast``.

  By default, ``JoinedStr`` (f-string) nodes and their contents are skipped
  because they previously couldn't be handled. Set ``include_joined_str`` to True to include them.
  """
  iter_children = iter_children_func(node)
  done = set()
  stack = [node]
  while stack:
    current = stack.pop()
    assert current not in done # protect against infinite loop in case of a bad tree.
    done.add(current)

    yield current

    # Insert all children in reverse order (so that first child ends up on top of the stack).
    # This is faster than building a list and reversing it.
    ins = len(stack)
    for c in iter_children(current, include_joined_str):
      stack.insert(ins, c)


def replace(text, replacements):
  # type: (str, List[Tuple[int, int, str]]) -> str
  """
  Replaces multiple slices of text with new values. This is a convenience method for making code
  modifications of ranges e.g. as identified by ``ASTTokens.get_text_range(node)``. Replacements is
  an iterable of ``(start, end, new_text)`` tuples.

  For example, ``replace("this is a test", [(0, 4, "X"), (8, 9, "THE")])`` produces
  ``"X is THE test"``.
  """
  p = 0
  parts = []
  for (start, end, new_text) in sorted(replacements):
    parts.append(text[p:start])
    parts.append(new_text)
    p = end
  parts.append(text[p:])
  return ''.join(parts)


class NodeMethods:
  """
  Helper to get `visit_{node_type}` methods given a node's class and cache the results.
  """
  def __init__(self):
    # type: () -> None
    self._cache = {} # type: Dict[Union[ABCMeta, type], Callable[[AstNode, Token, Token], Tuple[Token, Token]]]

  def get(self, obj, cls):
    # type: (Any, Union[ABCMeta, type]) -> Callable
    """
    Using the lowercase name of the class as node_type, returns `obj.visit_{node_type}`,
    or `obj.visit_default` if the type-specific method is not found.
    """
    method = self._cache.get(cls)
    if not method:
      name = "visit_" + cls.__name__.lower()
      method = getattr(obj, name, obj.visit_default)
      self._cache[cls] = method
    return method


def patched_generate_tokens(original_tokens):
  # type: (Iterable[TokenInfo]) -> Iterator[TokenInfo]
  """
  Fixes tokens yielded by `tokenize.generate_tokens` to handle more non-ASCII characters in identifiers.
  Workaround for https://github.com/python/cpython/issues/68382.
  Should only be used when tokenizing a string that is known to be valid syntax,
  because it assumes that error tokens are not actually errors.
  Combines groups of consecutive NAME, NUMBER, and/or ERRORTOKEN tokens into a single NAME token.
  """
  group = [] # type: List[tokenize.TokenInfo]
  for tok in original_tokens:
    if (
        tok.type in (tokenize.NAME, tokenize.ERRORTOKEN, tokenize.NUMBER)
        # Only combine tokens if they have no whitespace in between
        and (not group or group[-1].end == tok.start)
    ):
      group.append(tok)
    else:
      for combined_token in combine_tokens(group):
        yield combined_token
      group = []
      yield tok
for combined_token in combine_tokens(group):\n yield combined_token\n\ndef combine_tokens(group):\n # type: (List[tokenize.TokenInfo]) -> List[tokenize.TokenInfo]\n if not any(tok.type == tokenize.ERRORTOKEN for tok in group) or len({tok.line for tok in group}) != 1:\n return group\n return [\n tokenize.TokenInfo(\n type=tokenize.NAME,\n string="".join(t.string for t in group),\n start=group[0].start,\n end=group[-1].end,\n line=group[0].line,\n )\n ]\n\n\ndef last_stmt(node):\n # type: (ast.AST) -> ast.AST\n """\n If the given AST node contains multiple statements, return the last one.\n Otherwise, just return the node.\n """\n child_stmts = [\n child for child in iter_children_func(node)(node)\n if is_stmt(child) or type(child).__name__ in (\n "excepthandler",\n "ExceptHandler",\n "match_case",\n "MatchCase",\n "TryExcept",\n "TryFinally",\n )\n ]\n if child_stmts:\n return last_stmt(child_stmts[-1])\n return node\n\n\n\n@lru_cache(maxsize=None)\ndef fstring_positions_work():\n # type: () -> bool\n """\n The positions attached to nodes inside f-string FormattedValues have some bugs\n that were fixed in Python 3.9.7 in https://github.com/python/cpython/pull/27729.\n This checks for those bugs more concretely without relying on the Python version.\n Specifically this checks:\n - Values with a format spec or conversion\n - Repeated (i.e. 
identical-looking) expressions\n - f-strings implicitly concatenated over multiple lines.\n - Multiline, triple-quoted f-strings.\n """\n source = """(\n f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"\n f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"\n f"{x + y + z} {x} {y} {z} {z} {z!a} {z:z}"\n f'''\n {s} {t}\n {u} {v}\n '''\n )"""\n tree = ast.parse(source)\n name_nodes = [node for node in ast.walk(tree) if isinstance(node, ast.Name)]\n name_positions = [(node.lineno, node.col_offset) for node in name_nodes]\n positions_are_unique = len(set(name_positions)) == len(name_positions)\n correct_source_segments = all(\n ast.get_source_segment(source, node) == node.id\n for node in name_nodes\n )\n return positions_are_unique and correct_source_segments\n\ndef annotate_fstring_nodes(tree):\n # type: (ast.AST) -> None\n """\n Add a special attribute `_broken_positions` to nodes inside f-strings\n if the lineno/col_offset cannot be trusted.\n """\n if sys.version_info >= (3, 12):\n # f-strings were weirdly implemented until https://peps.python.org/pep-0701/\n # In Python 3.12, inner nodes have sensible positions.\n return\n for joinedstr in walk(tree, include_joined_str=True):\n if not isinstance(joinedstr, ast.JoinedStr):\n continue\n for part in joinedstr.values:\n # The ast positions of the FormattedValues/Constant nodes span the full f-string, which is weird.\n setattr(part, '_broken_positions', True) # use setattr for mypy\n\n if isinstance(part, ast.FormattedValue):\n if not fstring_positions_work():\n for child in walk(part.value):\n setattr(child, '_broken_positions', True)\n\n if part.format_spec: # this is another JoinedStr\n # Again, the standard positions span the full f-string.\n setattr(part.format_spec, '_broken_positions', True)\n | .venv\Lib\site-packages\asttokens\util.py | util.py | Python | 17,286 | 0.95 | 0.214433 | 0.17866 | awesome-app | 16 | 2023-10-20T19:37:30.309476 | Apache-2.0 | false | 245875d01d7fc43044227865a98ac930 |
__version__ = "3.0.0"\n | .venv\Lib\site-packages\asttokens\version.py | version.py | Python | 22 | 0.5 | 0 | 0 | vue-tools | 1 | 2025-04-13T00:17:47.250071 | BSD-3-Clause | false | 3ba21c3e0301d5604924318a9c1f011e |
# Copyright 2016 Grist Labs, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n"""\nThis module enhances the Python AST tree with token and source code information, sufficient to\ndetect the source text of each AST node. This is helpful for tools that make source code\ntransformations.\n"""\n\nfrom .line_numbers import LineNumbers\nfrom .asttokens import ASTText, ASTTokens, supports_tokenless\n\n__all__ = ['ASTText', 'ASTTokens', 'LineNumbers', 'supports_tokenless']\n | .venv\Lib\site-packages\asttokens\__init__.py | __init__.py | Python | 962 | 0.95 | 0.083333 | 0.619048 | vue-tools | 106 | 2025-01-08T00:16:30.541414 | Apache-2.0 | false | 71d07b566c060bd5aa15c9ec7539e40c
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\astroid_compat.cpython-313.pyc | astroid_compat.cpython-313.pyc | Other | 659 | 0.8 | 0 | 0 | python-kit | 326 | 2025-03-07T04:26:57.989578 | Apache-2.0 | false | 42b08b3d989efd3bb0c17747d9c87493 |
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\asttokens.cpython-313.pyc | asttokens.cpython-313.pyc | Other | 18,192 | 0.95 | 0.078704 | 0.004975 | node-utils | 823 | 2023-07-13T06:09:26.613937 | GPL-3.0 | false | be7b3651b0926a7ffab93fb1911079d1 |
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\line_numbers.cpython-313.pyc | line_numbers.cpython-313.pyc | Other | 3,686 | 0.95 | 0.05 | 0 | vue-tools | 261 | 2023-12-18T15:59:13.953188 | BSD-3-Clause | false | 1eed57ee1f434bf1b7c0971b2166a3c4 |
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\mark_tokens.cpython-313.pyc | mark_tokens.cpython-313.pyc | Other | 20,663 | 0.95 | 0.013333 | 0.013889 | node-utils | 893 | 2023-08-17T13:29:13.160541 | MIT | false | a9fd995ef6cbb854a4a20d602a458648 |
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\util.cpython-313.pyc | util.cpython-313.pyc | Other | 20,990 | 0.95 | 0.068966 | 0.018349 | node-utils | 285 | 2024-05-09T19:55:18.440026 | BSD-3-Clause | false | 2a50dbf56f1c947fe67baa73d670bf97 |
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\version.cpython-313.pyc | version.cpython-313.pyc | Other | 209 | 0.7 | 0 | 0 | python-kit | 175 | 2023-12-30T11:45:19.045975 | GPL-3.0 | false | 116c8bd51f56a53d8c341b974ae0f029 |
\n\n | .venv\Lib\site-packages\asttokens\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 596 | 0.7 | 0.142857 | 0 | react-lib | 899 | 2024-04-22T16:55:41.972117 | MIT | false | fdcde246dd98a2ed792d21a4087ba131 |
pip\n | .venv\Lib\site-packages\asttokens-3.0.0.dist-info\INSTALLER | INSTALLER | Other | 4 | 0.5 | 0 | 0 | node-utils | 931 | 2024-11-01T07:26:14.811052 | Apache-2.0 | false | 365c9bfeb7d89244f2ce01c1de44cb85 |
Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "{}"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n | .venv\Lib\site-packages\asttokens-3.0.0.dist-info\LICENSE | LICENSE | Other | 11,357 | 0.95 | 0.119403 | 0 | react-lib | 305 | 2023-10-18T13:48:01.030709 | GPL-3.0 | false | e3fc50a88d0a364313df4b21ef20c29e |
Metadata-Version: 2.1\nName: asttokens\nVersion: 3.0.0\nSummary: Annotate AST trees with source code positions\nHome-page: https://github.com/gristlabs/asttokens\nAuthor: Dmitry Sagalovskiy, Grist Labs\nAuthor-email: dmitry@getgrist.com\nLicense: Apache 2.0\nKeywords: code,ast,parse,tokenize,refactor\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Intended Audience :: Developers\nClassifier: Topic :: Software Development :: Libraries :: Python Modules\nClassifier: Topic :: Software Development :: Code Generators\nClassifier: Topic :: Software Development :: Compilers\nClassifier: Topic :: Software Development :: Interpreters\nClassifier: Topic :: Software Development :: Pre-processors\nClassifier: Environment :: Console\nClassifier: Operating System :: OS Independent\nClassifier: Programming Language :: Python :: 3.8\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Programming Language :: Python :: 3.13\nClassifier: Programming Language :: Python :: Implementation :: CPython\nClassifier: Programming Language :: Python :: Implementation :: PyPy\nRequires-Python: >=3.8\nLicense-File: LICENSE\nProvides-Extra: astroid\nRequires-Dist: astroid<4,>=2; extra == "astroid"\nProvides-Extra: test\nRequires-Dist: astroid<4,>=2; extra == "test"\nRequires-Dist: pytest; extra == "test"\nRequires-Dist: pytest-cov; extra == "test"\nRequires-Dist: pytest-xdist; extra == "test"\n\nASTTokens\n=========\n\n.. image:: https://img.shields.io/pypi/v/asttokens.svg\n :target: https://pypi.python.org/pypi/asttokens/\n.. image:: https://img.shields.io/pypi/pyversions/asttokens.svg\n :target: https://pypi.python.org/pypi/asttokens/\n.. 
image:: https://github.com/gristlabs/asttokens/actions/workflows/build-and-test.yml/badge.svg\n :target: https://github.com/gristlabs/asttokens/actions/workflows/build-and-test.yml\n.. image:: https://readthedocs.org/projects/asttokens/badge/?version=latest\n :target: http://asttokens.readthedocs.io/en/latest/index.html\n.. image:: https://coveralls.io/repos/github/gristlabs/asttokens/badge.svg\n :target: https://coveralls.io/github/gristlabs/asttokens\n\n.. Start of user-guide\n\nThe ``asttokens`` module annotates Python abstract syntax trees (ASTs) with the positions of tokens\nand text in the source code that generated them.\n\nIt makes it possible for tools that work with logical AST nodes to find the particular text that\nresulted in those nodes, for example for automated refactoring or highlighting.\n\nInstallation\n------------\nasttokens is available on PyPI: https://pypi.python.org/pypi/asttokens/::\n\n pip install asttokens\n\nThe code is on GitHub: https://github.com/gristlabs/asttokens.\n\nThe API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.html.\n\nUsage\n-----\n\nASTTokens can annotate both trees built by `ast <https://docs.python.org/2/library/ast.html>`_,\nAND those built by `astroid <https://github.com/PyCQA/astroid>`_.\n\nHere's an example:\n\n.. code-block:: python\n\n import asttokens, ast\n source = "Robot('blue').walk(steps=10*n)"\n atok = asttokens.ASTTokens(source, parse=True)\n\nOnce the tree has been marked, nodes get ``.first_token``, ``.last_token`` attributes, and\nthe ``ASTTokens`` object offers helpful methods:\n\n.. code-block:: python\n\n attr_node = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Attribute))\n print(atok.get_text(attr_node))\n start, end = attr_node.last_token.startpos, attr_node.last_token.endpos\n print(atok.text[:start] + 'RUN' + atok.text[end:])\n\nWhich produces this output:\n\n.. 
code-block:: text\n\n Robot('blue').walk\n Robot('blue').RUN(steps=10*n)\n\nThe ``ASTTokens`` object also offers methods to walk and search the list of tokens that make up\nthe code (or a particular AST node), which is more useful and powerful than dealing with the text\ndirectly.\n\n\nContribute\n----------\n\nTo contribute:\n\n1. Fork this repository, and clone your fork.\n2. Install the package with test dependencies (ideally in a virtualenv) with::\n\n pip install -e '.[test]'\n\n3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``.\n4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel.\n5. By default certain tests which take a very long time to run are skipped, but they are run in CI.\n These are marked using the ``pytest`` marker ``slow`` and can be run on their own with ``pytest -m slow`` or as part of the full suite with ``pytest -m ''``.\n | .venv\Lib\site-packages\asttokens-3.0.0.dist-info\METADATA | METADATA | Other | 4,726 | 0.95 | 0.050847 | 0 | node-utils | 548 | 2025-06-28T22:51:09.504704 | MIT | false | 74f862ac8f7c514d504b971577403d1c |
asttokens-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nasttokens-3.0.0.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357\nasttokens-3.0.0.dist-info/METADATA,sha256=cg1yWNJgO6xzqQzaKsQoKJuKZMEfuJAh07iQLAgNv6k,4726\nasttokens-3.0.0.dist-info/RECORD,,\nasttokens-3.0.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91\nasttokens-3.0.0.dist-info/top_level.txt,sha256=nJDweSD7_NBhOlR3c8bkKJMKM-pxlAS8Kyh8GcCT2dk,10\nasttokens/__init__.py,sha256=8eONA3X-9s93-v-2gEoz4649fDUpvzBthFB5Ld7dHAg,962\nasttokens/__pycache__/__init__.cpython-313.pyc,,\nasttokens/__pycache__/astroid_compat.cpython-313.pyc,,\nasttokens/__pycache__/asttokens.cpython-313.pyc,,\nasttokens/__pycache__/line_numbers.cpython-313.pyc,,\nasttokens/__pycache__/mark_tokens.cpython-313.pyc,,\nasttokens/__pycache__/util.cpython-313.pyc,,\nasttokens/__pycache__/version.cpython-313.pyc,,\nasttokens/astroid_compat.py,sha256=ilaVBRWcHpQ3ZLBSBs9usUwnLW3Orfn6sM89cMN8zNI,586\nasttokens/asttokens.py,sha256=CQZ0ppXgTzHGbK4dqI4toSLywHIiqNK8jIVqbQClzYI,17760\nasttokens/line_numbers.py,sha256=ODbdlHI4Iht4UnSfsxmOHCIVw4c2XX7j-MdaCa6F8bo,2834\nasttokens/mark_tokens.py,sha256=YKE88IHnYyQiNvlFlxqU-BDhRRWkYYjMEsjxKlF1cqw,21012\nasttokens/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\nasttokens/util.py,sha256=zkszPUVGR0-UxZJI-I4lTrA7yH2IUOz8IBmwGas-pbs,17286\nasttokens/version.py,sha256=EPmgXOdWKks5S__ZMH7Nu6xpAeVrZpfxaFy4pykuyeI,22\n | .venv\Lib\site-packages\asttokens-3.0.0.dist-info\RECORD | RECORD | Other | 1,485 | 0.7 | 0 | 0 | vue-tools | 832 | 2025-05-31T12:55:15.202379 | MIT | false | 0b092967ab9ceccdfca90587f5d55839 |
asttokens\n | .venv\Lib\site-packages\asttokens-3.0.0.dist-info\top_level.txt | top_level.txt | Other | 10 | 0.5 | 0 | 0 | awesome-app | 416 | 2023-08-01T10:48:08.626937 | GPL-3.0 | false | 7ba6772a2bcaed37607cad4d3771fb76 |
Wheel-Version: 1.0\nGenerator: setuptools (75.6.0)\nRoot-Is-Purelib: true\nTag: py3-none-any\n\n | .venv\Lib\site-packages\asttokens-3.0.0.dist-info\WHEEL | WHEEL | Other | 91 | 0.5 | 0 | 0 | node-utils | 243 | 2025-02-05T17:54:19.846089 | BSD-3-Clause | false | 75ee1e0d275021ca61f0502e783d543c |
\n\n | .venv\Lib\site-packages\async_lru\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 16,466 | 0.95 | 0.013986 | 0.014493 | python-kit | 140 | 2023-10-27T20:50:10.602162 | MIT | false | d4ae7fbbe45f67e7e0276387f7fcf664 |
pip\n | .venv\Lib\site-packages\async_lru-2.0.5.dist-info\INSTALLER | INSTALLER | Other | 4 | 0.5 | 0 | 0 | awesome-app | 311 | 2025-06-20T12:00:21.511530 | GPL-3.0 | false | 365c9bfeb7d89244f2ce01c1de44cb85 |
The MIT License\n\nCopyright (c) 2018 aio-libs team https://github.com/aio-libs/\nCopyright (c) 2017 Ocean S. A. https://ocean.io/\nCopyright (c) 2016-2017 WikiBusiness Corporation http://wikibusiness.org/\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the "Software"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n | .venv\Lib\site-packages\async_lru-2.0.5.dist-info\LICENSE | LICENSE | Other | 1,226 | 0.8 | 0 | 0 | vue-tools | 522 | 2024-07-13T16:01:14.332828 | GPL-3.0 | false | 637551ffe345b083f0847cebe17a634d |
Metadata-Version: 2.2\nName: async-lru\nVersion: 2.0.5\nSummary: Simple LRU cache for asyncio\nHome-page: https://github.com/aio-libs/async-lru\nMaintainer: aiohttp team <team@aiohttp.org>\nMaintainer-email: team@aiohttp.org\nLicense: MIT License\nProject-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org\nProject-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org\nProject-URL: CI: GitHub Actions, https://github.com/aio-libs/async-lru/actions\nProject-URL: GitHub: repo, https://github.com/aio-libs/async-lru\nKeywords: asyncio,lru,lru_cache\nClassifier: License :: OSI Approved :: MIT License\nClassifier: Intended Audience :: Developers\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3 :: Only\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Programming Language :: Python :: 3.13\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Framework :: AsyncIO\nRequires-Python: >=3.9\nDescription-Content-Type: text/x-rst\nLicense-File: LICENSE\nRequires-Dist: typing_extensions>=4.0.0; python_version < "3.11"\n\nasync-lru\n=========\n\n:info: Simple lru cache for asyncio\n\n.. image:: https://github.com/aio-libs/async-lru/actions/workflows/ci-cd.yml/badge.svg?event=push\n :target: https://github.com/aio-libs/async-lru/actions/workflows/ci-cd.yml?query=event:push\n :alt: GitHub Actions CI/CD workflows status\n\n.. image:: https://img.shields.io/pypi/v/async-lru.svg?logo=Python&logoColor=white\n :target: https://pypi.org/project/async-lru\n :alt: async-lru @ PyPI\n\n.. image:: https://codecov.io/gh/aio-libs/async-lru/branch/master/graph/badge.svg\n :target: https://codecov.io/gh/aio-libs/async-lru\n\n.. 
image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat\n :target: https://matrix.to/#/%23aio-libs:matrix.org\n :alt: Matrix Room — #aio-libs:matrix.org\n\n.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat\n :target: https://matrix.to/#/%23aio-libs-space:matrix.org\n :alt: Matrix Space — #aio-libs-space:matrix.org\n\nInstallation\n------------\n\n.. code-block:: shell\n\n pip install async-lru\n\nUsage\n-----\n\nThis package is a port of Python's built-in `functools.lru_cache <https://docs.python.org/3/library/functools.html#functools.lru_cache>`_ function for `asyncio <https://docs.python.org/3/library/asyncio.html>`_. To better handle async behaviour, it also ensures multiple concurrent calls will only result in 1 call to the wrapped function, with all ``await``\s receiving the result of that call when it completes.\n\n.. code-block:: python\n\n import asyncio\n\n import aiohttp\n from async_lru import alru_cache\n\n\n @alru_cache(maxsize=32)\n async def get_pep(num):\n resource = 'http://www.python.org/dev/peps/pep-%04d/' % num\n async with aiohttp.ClientSession() as session:\n try:\n async with session.get(resource) as s:\n return await s.read()\n except aiohttp.ClientError:\n return 'Not Found'\n\n\n async def main():\n for n in 8, 290, 308, 320, 8, 218, 320, 279, 289, 320, 9991:\n pep = await get_pep(n)\n print(n, len(pep))\n\n print(get_pep.cache_info())\n # CacheInfo(hits=3, misses=8, maxsize=32, currsize=8)\n\n # closing is optional, but highly recommended\n await get_pep.cache_close()\n\n\n asyncio.run(main())\n\n\nTTL (time-to-live in seconds, expiration on timeout) is supported by accepting `ttl` configuration\nparameter (off by default):\n\n.. 
code-block:: python\n\n @alru_cache(ttl=5)\n async def func(arg):\n return arg * 2\n\n\nThe library supports explicit invalidation for specific function call by\n`cache_invalidate()`:\n\n.. code-block:: python\n\n @alru_cache(ttl=5)\n async def func(arg1, arg2):\n return arg1 + arg2\n\n func.cache_invalidate(1, arg2=2)\n\nThe method returns `True` if corresponding arguments set was cached already, `False`\notherwise.\n\nThanks\n------\n\nThe library was donated by `Ocean S.A. <https://ocean.io/>`_\n\nThanks to the company for contribution.\n | .venv\Lib\site-packages\async_lru-2.0.5.dist-info\METADATA | METADATA | Other | 4,485 | 0.95 | 0.115385 | 0.021277 | awesome-app | 563 | 2023-11-16T22:53:43.683855 | Apache-2.0 | false | 89a5618fee6ef09e33532dbdbed1d4e9 |
async_lru-2.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nasync_lru-2.0.5.dist-info/LICENSE,sha256=6mWXVnm8IJO-kl1SA1jkEJa9lv3e3uPpKRYKX6rc9XM,1226\nasync_lru-2.0.5.dist-info/METADATA,sha256=8xQLHb4_Zr7AvNfM1kofHWX5JjNubmBnfJeicqq790I,4485\nasync_lru-2.0.5.dist-info/RECORD,,\nasync_lru-2.0.5.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91\nasync_lru-2.0.5.dist-info/top_level.txt,sha256=nUy-F2tq_gf0YsQKIGqHmkS_XJxU_dQlINuXZIAHTsk,10\nasync_lru/__init__.py,sha256=ebHg3Yib8ILqq0nNPRGG633b3kVb1ZaJFcZemYiSlEg,9425\nasync_lru/__pycache__/__init__.cpython-313.pyc,,\nasync_lru/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\n | .venv\Lib\site-packages\async_lru-2.0.5.dist-info\RECORD | RECORD | Other | 693 | 0.7 | 0 | 0 | python-kit | 594 | 2024-03-15T10:02:16.624813 | GPL-3.0 | false | a0b148dbbb60551b58688e644c5d3789 |
async_lru\n | .venv\Lib\site-packages\async_lru-2.0.5.dist-info\top_level.txt | top_level.txt | Other | 10 | 0.5 | 0 | 0 | node-utils | 429 | 2024-07-24T04:56:47.131135 | GPL-3.0 | false | c4b9cb72d56e6b6ab9cd4fac9421f60d |
Wheel-Version: 1.0\nGenerator: setuptools (76.0.0)\nRoot-Is-Purelib: true\nTag: py3-none-any\n\n | .venv\Lib\site-packages\async_lru-2.0.5.dist-info\WHEEL | WHEEL | Other | 91 | 0.5 | 0 | 0 | python-kit | 178 | 2023-09-12T22:26:40.411187 | GPL-3.0 | false | 1b247ebaec32fe9d45aeaf7545cfd008 |
# SPDX-License-Identifier: MIT

"""
Commonly useful converters.
"""

import typing

from ._compat import _AnnotationExtractor
from ._make import NOTHING, Converter, Factory, pipe


__all__ = [
    "default_if_none",
    "optional",
    "pipe",
    "to_bool",
]


def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to `None`.

    Type annotations will be inferred from the wrapped converter's, if it has
    any.

    Args:
        converter (typing.Callable):
            the converter that is used for non-`None` values.

    .. versionadded:: 17.1.0
    """
    wraps_converter = isinstance(converter, Converter)

    if wraps_converter:
        # attrs ``Converter`` objects are called with (value, instance, field).
        def optional_converter(val, inst, field):
            return None if val is None else converter(val, inst, field)

    else:
        # Plain converters take only the value.
        def optional_converter(val):
            return None if val is None else converter(val)

    # Propagate the wrapped converter's annotations, widened with None.
    extractor = _AnnotationExtractor(converter)

    param_type = extractor.get_first_param_type()
    if param_type:
        optional_converter.__annotations__["val"] = typing.Optional[param_type]

    return_type = extractor.get_return_type()
    if return_type:
        optional_converter.__annotations__["return"] = typing.Optional[
            return_type
        ]

    if wraps_converter:
        return Converter(optional_converter, takes_self=True, takes_field=True)

    return optional_converter


def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows to replace `None` values by *default* or the result
    of *factory*.

    Args:
        default:
            Value to be used if `None` is passed. Passing an instance of
            `attrs.Factory` is supported, however the ``takes_self`` option is
            *not*.

        factory (typing.Callable):
            A callable that takes no parameters whose result is used if `None`
            is passed.

    Raises:
        TypeError: If **neither** *default* or *factory* is passed.

        TypeError: If **both** *default* and *factory* are passed.

        ValueError:
            If an instance of `attrs.Factory` is passed with
            ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    if default is NOTHING and factory is None:
        raise TypeError("Must pass either `default` or `factory`.")

    if default is not NOTHING and factory is not None:
        raise TypeError(
            "Must pass either `default` or `factory` but not both."
        )

    if factory is not None:
        default = Factory(factory)

    if not isinstance(default, Factory):
        # Plain value: substitute it verbatim for None.
        def default_if_none_converter(val):
            return default if val is None else val

        return default_if_none_converter

    if default.takes_self:
        raise ValueError("`takes_self` is not supported by default_if_none.")

    # Factory: call it lazily each time a None comes through.
    def default_if_none_converter(val):
        return default.factory() if val is None else val

    return default_if_none_converter


def to_bool(val):
    """
    Convert "boolean" strings (for example, from environment variables) to real
    booleans.

    Truthy inputs: ``True``, ``1``, and the strings ``"true"``, ``"t"``,
    ``"yes"``, ``"y"``, ``"on"``, ``"1"`` (case-insensitive).

    Falsy inputs: ``False``, ``0``, and the strings ``"false"``, ``"f"``,
    ``"no"``, ``"n"``, ``"off"``, ``"0"`` (case-insensitive).

    Raises:
        ValueError: For any other value.

    .. versionadded:: 21.3.0
    """
    normalized = val.lower() if isinstance(val, str) else val

    # Tuples (not sets) on purpose: membership uses ==, which also matches
    # unhashable inputs, and True/1 resp. False/0 may both be listed.
    if normalized in (True, "true", "t", "yes", "y", "on", "1", 1):
        return True
    if normalized in (False, "false", "f", "no", "n", "off", "0", 0):
        return False

    raise ValueError(f"Cannot convert value to bool: {normalized!r}")
# Type stubs for ``attr.converters``.
#
# ``pipe`` and ``optional`` are overloaded so that plain one-argument
# callables stay plain callables, while attrs three-argument converters
# keep the richer ``_ConverterType`` signature.
from typing import Callable, Any, overload

from attrs import _ConverterType, _CallableConverterType

@overload
def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ...
@overload
def pipe(*validators: _ConverterType) -> _ConverterType: ...
@overload
def optional(converter: _CallableConverterType) -> _CallableConverterType: ...
@overload
def optional(converter: _ConverterType) -> _ConverterType: ...

# Accepts either a positional default value or a keyword-only zero-argument
# factory -- never both (mirrors the runtime TypeError).
@overload
def default_if_none(default: Any) -> _CallableConverterType: ...
@overload
def default_if_none(
    *, factory: Callable[[], Any]
) -> _CallableConverterType: ...
def to_bool(val: str | int | bool) -> bool: ...
# SPDX-License-Identifier: MIT\n\nfrom __future__ import annotations\n\nfrom typing import ClassVar\n\n\nclass FrozenError(AttributeError):\n """\n A frozen/immutable instance or attribute have been attempted to be\n modified.\n\n It mirrors the behavior of ``namedtuples`` by using the same error message\n and subclassing `AttributeError`.\n\n .. versionadded:: 20.1.0\n """\n\n msg = "can't set attribute"\n args: ClassVar[tuple[str]] = [msg]\n\n\nclass FrozenInstanceError(FrozenError):\n """\n A frozen instance has been attempted to be modified.\n\n .. versionadded:: 16.1.0\n """\n\n\nclass FrozenAttributeError(FrozenError):\n """\n A frozen attribute has been attempted to be modified.\n\n .. versionadded:: 20.1.0\n """\n\n\nclass AttrsAttributeNotFoundError(ValueError):\n """\n An *attrs* function couldn't find an attribute that the user asked for.\n\n .. versionadded:: 16.2.0\n """\n\n\nclass NotAnAttrsClassError(ValueError):\n """\n A non-*attrs* class has been passed into an *attrs* function.\n\n .. versionadded:: 16.2.0\n """\n\n\nclass DefaultAlreadySetError(RuntimeError):\n """\n A default has been set when defining the field and is attempted to be reset\n using the decorator.\n\n .. versionadded:: 17.1.0\n """\n\n\nclass UnannotatedAttributeError(RuntimeError):\n """\n A class with ``auto_attribs=True`` has a field without a type annotation.\n\n .. versionadded:: 17.3.0\n """\n\n\nclass PythonTooOldError(RuntimeError):\n """\n It was attempted to use an *attrs* feature that requires a newer Python\n version.\n\n .. versionadded:: 18.2.0\n """\n\n\nclass NotCallableError(TypeError):\n """\n A field requiring a callable has been set with a value that is not\n callable.\n\n .. 
versionadded:: 19.2.0\n """\n\n def __init__(self, msg, value):\n super(TypeError, self).__init__(msg, value)\n self.msg = msg\n self.value = value\n\n def __str__(self):\n return str(self.msg)\n | .venv\Lib\site-packages\attr\exceptions.py | exceptions.py | Python | 1,977 | 0.95 | 0.168421 | 0.016129 | vue-tools | 76 | 2024-05-27T06:25:28.365655 | MIT | false | 3d6c30e65770bc4f5f1c4835f81a4c8e |
# Type stubs for ``attr.exceptions``.
from typing import Any

class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    # The original message and offending non-callable value, as passed to
    # __init__.
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
# SPDX-License-Identifier: MIT

"""
Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
"""

from ._make import Attribute


def _split_what(what):
    """
    Partition *what* into a tuple of `frozenset`s: (types, names, attributes).
    """
    return tuple(
        frozenset(item for item in what if isinstance(item, kind))
        for kind in (type, str, Attribute)
    )


def include(*what):
    """
    Create a filter that only allows *what*.

    Args:
        what (list[type, str, attrs.Attribute]):
            What to include. Can be a type, a name, or an attribute.

    Returns:
        Callable:
            A callable that can be passed to `attrs.asdict`'s and
            `attrs.astuple`'s *filter* argument.

    .. versionchanged:: 23.1.0 Accept strings with field names.
    """
    classes, names, attributes = _split_what(what)

    def include_(attribute, value):
        if value.__class__ in classes:
            return True
        if attribute.name in names:
            return True
        return attribute in attributes

    return include_


def exclude(*what):
    """
    Create a filter that does **not** allow *what*.

    Args:
        what (list[type, str, attrs.Attribute]):
            What to exclude. Can be a type, a name, or an attribute.

    Returns:
        Callable:
            A callable that can be passed to `attrs.asdict`'s and
            `attrs.astuple`'s *filter* argument.

    .. versionchanged:: 23.3.0 Accept field name string as input argument
    """
    classes, names, attributes = _split_what(what)

    def exclude_(attribute, value):
        if value.__class__ in classes:
            return False
        if attribute.name in names:
            return False
        return attribute not in attributes

    return exclude_
# Type stubs for ``attr.filters``.
from typing import Any

from . import Attribute, _FilterType

# Both factories accept any mix of classes, field-name strings, and Attribute
# instances and return a predicate usable as asdict()/astuple() ``filter``.
def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
# SPDX-License-Identifier: MIT

"""
Commonly used hooks for on_setattr.
"""

from . import _config
from .exceptions import FrozenAttributeError


def pipe(*setters):
    """
    Run all *setters* and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        # Each setter receives the previous setter's result.
        value = new_value
        for setter in setters:
            value = setter(instance, attrib, value)
        return value

    return wrapped_pipe


def frozen(_, __, ___):
    """
    Prevent an attribute to be modified.

    .. versionadded:: 20.1.0
    """
    raise FrozenAttributeError


def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    .. versionadded:: 20.1.0
    """
    # Honor the global validator kill-switch.
    if _config._run_validators is False:
        return new_value

    validator = attrib.validator
    if validator:
        validator(instance, attrib, new_value)

    return new_value


def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    converter = attrib.converter
    if not converter:
        return new_value

    # Local import; can go away once 3.8 is dropped in favor of
    # attrs.Converter.
    from ._make import Converter

    if isinstance(converter, Converter):
        return converter(new_value, instance, attrib)

    return converter(new_value)


# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# Sphinx's autodata stopped working, so the docstring is inlined in the API
# docs.
NO_OP = object()
# Type stubs for ``attr.setters`` (on_setattr hooks).
from typing import Any, NewType, NoReturn, TypeVar

from . import Attribute
from attrs import _OnSetAttrType

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

# Sentinel disabling class-wide on_setattr hooks for individual attributes.
_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
# Type stubs for ``attr.validators``.
from types import UnionType
from typing import (
    Any,
    AnyStr,
    Callable,
    Container,
    ContextManager,
    Iterable,
    Mapping,
    Match,
    Pattern,
    TypeVar,
    overload,
)

from attrs import _ValidatorType
from attrs import _ValidatorArgType

_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)

# Global enable/disable of validator execution.
def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...

# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
    type: tuple[type[_T1], type[_T2]],
) -> _ValidatorType[_T1 | _T2]: ...
@overload
def instance_of(
    type: tuple[type[_T1], type[_T2], type[_T3]],
) -> _ValidatorType[_T1 | _T2 | _T3]: ...
@overload
def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
@overload
def instance_of(type: UnionType) -> _ValidatorType[Any]: ...
def optional(
    validator: (
        _ValidatorType[_T]
        | list[_ValidatorType[_T]]
        | tuple[_ValidatorType[_T]]
    ),
) -> _ValidatorType[_T | None]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
    regex: Pattern[AnyStr] | AnyStr,
    flags: int = ...,
    func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
    member_validator: _ValidatorArgType[_T],
    iterable_validator: _ValidatorType[_I] | None = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
    key_validator: _ValidatorType[_K],
    value_validator: _ValidatorType[_V],
    mapping_validator: _ValidatorType[_M] | None = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...

# Ordering/length validators.
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
def not_(
    validator: _ValidatorType[_T],
    *,
    msg: str | None = None,
    exc_types: type[Exception] | Iterable[type[Exception]] = ...,
) -> _ValidatorType[_T]: ...
def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
# SPDX-License-Identifier: MIT


import functools
import types

from ._make import __ne__


_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}


def cmp_using(
    eq=None,
    lt=None,
    le=None,
    gt=None,
    ge=None,
    require_same_type=True,
    class_name="Comparable",
):
    """
    Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
    and ``cmp`` arguments to customize field comparison.

    The resulting class will have a full set of ordering methods if at least
    one of ``{lt, le, gt, ge}`` and ``eq`` are provided.

    Args:
        eq (typing.Callable | None):
            Callable used to evaluate equality of two objects.

        lt (typing.Callable | None):
            Callable used to evaluate whether one object is less than another
            object.

        le (typing.Callable | None):
            Callable used to evaluate whether one object is less than or equal
            to another object.

        gt (typing.Callable | None):
            Callable used to evaluate whether one object is greater than
            another object.

        ge (typing.Callable | None):
            Callable used to evaluate whether one object is greater than or
            equal to another object.

        require_same_type (bool):
            When `True`, equality and ordering methods will return
            `NotImplemented` if objects are not of the same type.

        class_name (str | None): Name of class. Defaults to "Comparable".

    See `comparison` for more details.

    .. versionadded:: 21.1.0
    """

    body = {
        "__slots__": ["value"],
        "__init__": _make_init(),
        "_requirements": [],
        "_is_comparable_to": _is_comparable_to,
    }

    # Add operations.
    num_order_functions = 0
    has_eq_function = False

    if eq is not None:
        has_eq_function = True
        body["__eq__"] = _make_operator("eq", eq)
        body["__ne__"] = __ne__

    if lt is not None:
        num_order_functions += 1
        body["__lt__"] = _make_operator("lt", lt)

    if le is not None:
        num_order_functions += 1
        body["__le__"] = _make_operator("le", le)

    if gt is not None:
        num_order_functions += 1
        body["__gt__"] = _make_operator("gt", gt)

    if ge is not None:
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
        type_._requirements.append(_check_same_type)

    # Add total ordering if at least one operation was defined.
    if 0 < num_order_functions < 4:
        if not has_eq_function:
            # functools.total_ordering requires __eq__ to be defined,
            # so raise early error here to keep a nice stack.
            # (Error message was previously garbled English; fixed.)
            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
            raise ValueError(msg)
        type_ = functools.total_ordering(type_)

    return type_


def _make_init():
    """
    Create __init__ method.
    """

    def __init__(self, value):
        """
        Initialize object with *value*.
        """
        self.value = value

    return __init__


def _make_operator(name, func):
    """
    Create an operator method named ``__<name>__`` that delegates to *func*.
    """

    def method(self, other):
        # Bail out (so Python can try the reflected operation) unless all
        # registered requirements -- e.g. same-type -- are satisfied.
        if not self._is_comparable_to(other):
            return NotImplemented

        result = func(self.value, other.value)
        if result is NotImplemented:
            return NotImplemented

        return result

    method.__name__ = f"__{name}__"
    method.__doc__ = (
        f"Return a {_operation_names[name]} b. Computed by attrs."
    )

    return method


def _is_comparable_to(self, other):
    """
    Check whether `other` is comparable to `self`.
    """
    return all(func(self, other) for func in self._requirements)


def _check_same_type(self, other):
    """
    Return True if *self* and *other* are of the same type, False otherwise.
    """
    return other.value.__class__ is self.value.__class__
# Type stub for ``attr._cmp.cmp_using``.
from typing import Any, Callable

# A binary comparison callable: (a, b) -> bool.
_CompareWithType = Callable[[Any, Any], bool]

def cmp_using(
    eq: _CompareWithType | None = ...,
    lt: _CompareWithType | None = ...,
    le: _CompareWithType | None = ...,
    gt: _CompareWithType | None = ...,
    ge: _CompareWithType | None = ...,
    require_same_type: bool = ...,
    class_name: str = ...,
) -> type: ...
# SPDX-License-Identifier: MIT\n\nimport inspect\nimport platform\nimport sys\nimport threading\n\nfrom collections.abc import Mapping, Sequence # noqa: F401\nfrom typing import _GenericAlias\n\n\nPYPY = platform.python_implementation() == "PyPy"\nPY_3_9_PLUS = sys.version_info[:2] >= (3, 9)\nPY_3_10_PLUS = sys.version_info[:2] >= (3, 10)\nPY_3_11_PLUS = sys.version_info[:2] >= (3, 11)\nPY_3_12_PLUS = sys.version_info[:2] >= (3, 12)\nPY_3_13_PLUS = sys.version_info[:2] >= (3, 13)\nPY_3_14_PLUS = sys.version_info[:2] >= (3, 14)\n\n\nif PY_3_14_PLUS: # pragma: no cover\n import annotationlib\n\n _get_annotations = annotationlib.get_annotations\n\nelse:\n\n def _get_annotations(cls):\n """\n Get annotations for *cls*.\n """\n return cls.__dict__.get("__annotations__", {})\n\n\nclass _AnnotationExtractor:\n """\n Extract type annotations from a callable, returning None whenever there\n is none.\n """\n\n __slots__ = ["sig"]\n\n def __init__(self, callable):\n try:\n self.sig = inspect.signature(callable)\n except (ValueError, TypeError): # inspect failed\n self.sig = None\n\n def get_first_param_type(self):\n """\n Return the type annotation of the first argument if it's not empty.\n """\n if not self.sig:\n return None\n\n params = list(self.sig.parameters.values())\n if params and params[0].annotation is not inspect.Parameter.empty:\n return params[0].annotation\n\n return None\n\n def get_return_type(self):\n """\n Return the return type if it's not empty.\n """\n if (\n self.sig\n and self.sig.return_annotation is not inspect.Signature.empty\n ):\n return self.sig.return_annotation\n\n return None\n\n\n# Thread-local global to track attrs instances which are already being repr'd.\n# This is needed because there is no other (thread-safe) way to pass info\n# about the instances that are already being repr'd through the call stack\n# in order to ensure we don't perform infinite recursion.\n#\n# For instance, if an instance contains a dict which contains that 
instance,\n# we need to know that we're already repr'ing the outside instance from within\n# the dict's repr() call.\n#\n# This lives here rather than in _make.py so that the functions in _make.py\n# don't have a direct reference to the thread-local in their globals dict.\n# If they have such a reference, it breaks cloudpickle.\nrepr_context = threading.local()\n\n\ndef get_generic_base(cl):\n """If this is a generic class (A[str]), return the generic base for it."""\n if cl.__class__ is _GenericAlias:\n return cl.__origin__\n return None\n | .venv\Lib\site-packages\attr\_compat.py | _compat.py | Python | 2,704 | 0.95 | 0.191489 | 0.180556 | python-kit | 575 | 2023-10-14T01:23:08.312370 | BSD-3-Clause | false | de5e2815c6bfac93d1a5ffcc6eb60ca8 |
# SPDX-License-Identifier: MIT

__all__ = ["get_run_validators", "set_run_validators"]

# Module-level switch consulted by the validation machinery; validators run
# by default.
_run_validators = True


def set_run_validators(run):
    """
    Globally enable or disable running validators. They run by default.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
        instead.
    """
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")

    global _run_validators
    _run_validators = run


def get_run_validators():
    """
    Report whether validators are currently being run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
        instead.
    """
    return _run_validators
# Stub-only compatibility shim for the AttrsInstance protocol.
from typing import Any, ClassVar, Protocol

# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
MYPY = False

if MYPY:
    # A protocol to be able to statically accept an attrs class.
    class AttrsInstance_(Protocol):
        __attrs_attrs__: ClassVar[Any]

else:
    # For type checkers without plug-in support use an empty protocol that
    # will (hopefully) be combined into a union.
    class AttrsInstance_(Protocol):
        pass
# SPDX-License-Identifier: MIT\n\n\nfrom functools import total_ordering\n\nfrom ._funcs import astuple\nfrom ._make import attrib, attrs\n\n\n@total_ordering\n@attrs(eq=False, order=False, slots=True, frozen=True)\nclass VersionInfo:\n """\n A version object that can be compared to tuple of length 1--4:\n\n >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)\n True\n >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)\n True\n >>> vi = attr.VersionInfo(19, 2, 0, "final")\n >>> vi < (19, 1, 1)\n False\n >>> vi < (19,)\n False\n >>> vi == (19, 2,)\n True\n >>> vi == (19, 2, 1)\n False\n\n .. versionadded:: 19.2\n """\n\n year = attrib(type=int)\n minor = attrib(type=int)\n micro = attrib(type=int)\n releaselevel = attrib(type=str)\n\n @classmethod\n def _from_version_string(cls, s):\n """\n Parse *s* and return a _VersionInfo.\n """\n v = s.split(".")\n if len(v) == 3:\n v.append("final")\n\n return cls(\n year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]\n )\n\n def _ensure_tuple(self, other):\n """\n Ensure *other* is a tuple of a valid length.\n\n Returns a possibly transformed *other* and ourselves as a tuple of\n the same length as *other*.\n """\n\n if self.__class__ is other.__class__:\n other = astuple(other)\n\n if not isinstance(other, tuple):\n raise NotImplementedError\n\n if not (1 <= len(other) <= 4):\n raise NotImplementedError\n\n return astuple(self)[: len(other)], other\n\n def __eq__(self, other):\n try:\n us, them = self._ensure_tuple(other)\n except NotImplementedError:\n return NotImplemented\n\n return us == them\n\n def __lt__(self, other):\n try:\n us, them = self._ensure_tuple(other)\n except NotImplementedError:\n return NotImplemented\n\n # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't\n # have to do anything special with releaselevel for now.\n return us < them\n | .venv\Lib\site-packages\attr\_version_info.py | _version_info.py | Python | 2,121 | 0.95 | 0.139535 | 0.045455 | python-kit | 665 | 
2024-02-12T09:27:49.777787 | Apache-2.0 | false | 1b37e176fda08f4babc6c10fa09b7250 |
# Type stub for attr.VersionInfo -- read-only version components.
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
# SPDX-License-Identifier: MIT

"""
Classes Without Boilerplate
"""

from functools import partial
from typing import Callable, Literal, Protocol

from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, has, resolve_types
from ._make import (
    NOTHING,
    Attribute,
    Converter,
    Factory,
    _Nothing,
    attrib,
    attrs,
    evolve,
    fields,
    fields_dict,
    make_class,
    validate,
)
from ._next_gen import define, field, frozen, mutable
from ._version_info import VersionInfo


# Historic short aliases, kept for backward compatibility.
s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)


class AttrsInstance(Protocol):
    """
    Empty protocol standing in for "any attrs-decorated class" in public
    type hints.
    """

    pass


NothingType = Literal[_Nothing.NOTHING]

__all__ = [
    "NOTHING",
    "Attribute",
    "AttrsInstance",
    "Converter",
    "Factory",
    "NothingType",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "cmp_using",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "field",
    "fields",
    "fields_dict",
    "filters",
    "frozen",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "mutable",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]


def _make_getattr(mod_name: str) -> Callable:
    """
    Create a metadata proxy for packaging information that uses *mod_name* in
    its warnings and errors.
    """

    def __getattr__(name: str) -> str:
        # Only version metadata is served lazily; everything else is a
        # genuine missing attribute.
        if name not in ("__version__", "__version_info__"):
            msg = f"module {mod_name} has no attribute {name}"
            raise AttributeError(msg)

        # Deferred import: importlib.metadata is comparatively expensive, so
        # only pay for it when a version attribute is actually accessed.
        from importlib.metadata import metadata

        meta = metadata("attrs")

        if name == "__version_info__":
            return VersionInfo._from_version_string(meta["version"])

        return meta["version"]

    return __getattr__


# PEP 562 module-level __getattr__: serves __version__ / __version_info__.
__getattr__ = _make_getattr(__name__)
import enum\nimport sys\n\nfrom typing import (\n Any,\n Callable,\n Generic,\n Literal,\n Mapping,\n Protocol,\n Sequence,\n TypeVar,\n overload,\n)\n\n# `import X as X` is required to make these public\nfrom . import converters as converters\nfrom . import exceptions as exceptions\nfrom . import filters as filters\nfrom . import setters as setters\nfrom . import validators as validators\nfrom ._cmp import cmp_using as cmp_using\nfrom ._typing_compat import AttrsInstance_\nfrom ._version_info import VersionInfo\nfrom attrs import (\n define as define,\n field as field,\n mutable as mutable,\n frozen as frozen,\n _EqOrderType,\n _ValidatorType,\n _ConverterType,\n _ReprArgType,\n _OnSetAttrType,\n _OnSetAttrArgType,\n _FieldTransformer,\n _ValidatorArgType,\n)\n\nif sys.version_info >= (3, 10):\n from typing import TypeGuard, TypeAlias\nelse:\n from typing_extensions import TypeGuard, TypeAlias\n\nif sys.version_info >= (3, 11):\n from typing import dataclass_transform\nelse:\n from typing_extensions import dataclass_transform\n\n__version__: str\n__version_info__: VersionInfo\n__title__: str\n__description__: str\n__url__: str\n__uri__: str\n__author__: str\n__email__: str\n__license__: str\n__copyright__: str\n\n_T = TypeVar("_T")\n_C = TypeVar("_C", bound=type)\n\n_FilterType = Callable[["Attribute[_T]", _T], bool]\n\n# We subclass this here to keep the protocol's qualified name clean.\nclass AttrsInstance(AttrsInstance_, Protocol):\n pass\n\n_A = TypeVar("_A", bound=type[AttrsInstance])\n\nclass _Nothing(enum.Enum):\n NOTHING = enum.auto()\n\nNOTHING = _Nothing.NOTHING\nNothingType: TypeAlias = Literal[_Nothing.NOTHING]\n\n# NOTE: Factory lies about its return type to make this possible:\n# `x: List[int] # = Factory(list)`\n# Work around mypy issue #4554 in the common case by using an overload.\n\n@overload\ndef Factory(factory: Callable[[], _T]) -> _T: ...\n@overload\ndef Factory(\n factory: Callable[[Any], _T],\n takes_self: Literal[True],\n) -> _T: 
...\n@overload\ndef Factory(\n factory: Callable[[], _T],\n takes_self: Literal[False],\n) -> _T: ...\n\nIn = TypeVar("In")\nOut = TypeVar("Out")\n\nclass Converter(Generic[In, Out]):\n @overload\n def __init__(self, converter: Callable[[In], Out]) -> None: ...\n @overload\n def __init__(\n self,\n converter: Callable[[In, AttrsInstance, Attribute], Out],\n *,\n takes_self: Literal[True],\n takes_field: Literal[True],\n ) -> None: ...\n @overload\n def __init__(\n self,\n converter: Callable[[In, Attribute], Out],\n *,\n takes_field: Literal[True],\n ) -> None: ...\n @overload\n def __init__(\n self,\n converter: Callable[[In, AttrsInstance], Out],\n *,\n takes_self: Literal[True],\n ) -> None: ...\n\nclass Attribute(Generic[_T]):\n name: str\n default: _T | None\n validator: _ValidatorType[_T] | None\n repr: _ReprArgType\n cmp: _EqOrderType\n eq: _EqOrderType\n order: _EqOrderType\n hash: bool | None\n init: bool\n converter: Converter | None\n metadata: dict[Any, Any]\n type: type[_T] | None\n kw_only: bool\n on_setattr: _OnSetAttrType\n alias: str | None\n\n def evolve(self, **changes: Any) -> "Attribute[Any]": ...\n\n# NOTE: We had several choices for the annotation to use for type arg:\n# 1) Type[_T]\n# - Pros: Handles simple cases correctly\n# - Cons: Might produce less informative errors in the case of conflicting\n# TypeVars e.g. `attr.ib(default='bad', type=int)`\n# 2) Callable[..., _T]\n# - Pros: Better error messages than #1 for conflicting TypeVars\n# - Cons: Terrible error messages for validator checks.\n# e.g. 
attr.ib(type=int, validator=validate_str)\n# -> error: Cannot infer function type argument\n# 3) type (and do all of the work in the mypy plugin)\n# - Pros: Simple here, and we could customize the plugin with our own errors.\n# - Cons: Would need to write mypy plugin code to handle all the cases.\n# We chose option #1.\n\n# `attr` lies about its return type to make the following possible:\n# attr() -> Any\n# attr(8) -> int\n# attr(validator=<some callable>) -> Whatever the callable expects.\n# This makes this type of assignments possible:\n# x: int = attr(8)\n#\n# This form catches explicit None or no default but with no other arguments\n# returns Any.\n@overload\ndef attrib(\n default: None = ...,\n validator: None = ...,\n repr: _ReprArgType = ...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n type: None = ...,\n converter: None = ...,\n factory: None = ...,\n kw_only: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n) -> Any: ...\n\n# This form catches an explicit None or no default and infers the type from the\n# other arguments.\n@overload\ndef attrib(\n default: None = ...,\n validator: _ValidatorArgType[_T] | None = ...,\n repr: _ReprArgType = ...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n type: type[_T] | None = ...,\n converter: _ConverterType\n | list[_ConverterType]\n | tuple[_ConverterType]\n | None = ...,\n factory: Callable[[], _T] | None = ...,\n kw_only: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n) -> _T: ...\n\n# This form catches an explicit default argument.\n@overload\ndef attrib(\n default: _T,\n validator: _ValidatorArgType[_T] | None = ...,\n repr: _ReprArgType = 
...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n type: type[_T] | None = ...,\n converter: _ConverterType\n | list[_ConverterType]\n | tuple[_ConverterType]\n | None = ...,\n factory: Callable[[], _T] | None = ...,\n kw_only: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n) -> _T: ...\n\n# This form covers type=non-Type: e.g. forward references (str), Any\n@overload\ndef attrib(\n default: _T | None = ...,\n validator: _ValidatorArgType[_T] | None = ...,\n repr: _ReprArgType = ...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n type: object = ...,\n converter: _ConverterType\n | list[_ConverterType]\n | tuple[_ConverterType]\n | None = ...,\n factory: Callable[[], _T] | None = ...,\n kw_only: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n) -> Any: ...\n@overload\n@dataclass_transform(order_default=True, field_specifiers=(attrib, field))\ndef attrs(\n maybe_cls: _C,\n these: dict[str, Any] | None = ...,\n repr_ns: str | None = ...,\n repr: bool = ...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n auto_detect: bool = ...,\n collect_by_mro: bool = ...,\n getstate_setstate: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n match_args: bool = ...,\n unsafe_hash: bool | None = ...,\n) -> _C: 
...\n@overload\n@dataclass_transform(order_default=True, field_specifiers=(attrib, field))\ndef attrs(\n maybe_cls: None = ...,\n these: dict[str, Any] | None = ...,\n repr_ns: str | None = ...,\n repr: bool = ...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n auto_detect: bool = ...,\n collect_by_mro: bool = ...,\n getstate_setstate: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n match_args: bool = ...,\n unsafe_hash: bool | None = ...,\n) -> Callable[[_C], _C]: ...\ndef fields(cls: type[AttrsInstance]) -> Any: ...\ndef fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...\ndef validate(inst: AttrsInstance) -> None: ...\ndef resolve_types(\n cls: _A,\n globalns: dict[str, Any] | None = ...,\n localns: dict[str, Any] | None = ...,\n attribs: list[Attribute[Any]] | None = ...,\n include_extras: bool = ...,\n) -> _A: ...\n\n# TODO: add support for returning a proper attrs class from the mypy plugin\n# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',\n# [attr.ib()])` is valid\ndef make_class(\n name: str,\n attrs: list[str] | tuple[str, ...] | dict[str, Any],\n bases: tuple[type, ...] 
= ...,\n class_body: dict[str, Any] | None = ...,\n repr_ns: str | None = ...,\n repr: bool = ...,\n cmp: _EqOrderType | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n collect_by_mro: bool = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n) -> type: ...\n\n# _funcs --\n\n# TODO: add support for returning TypedDict from the mypy plugin\n# FIXME: asdict/astuple do not honor their factory args. Waiting on one of\n# these:\n# https://github.com/python/mypy/issues/4236\n# https://github.com/python/typing/issues/253\n# XXX: remember to fix attrs.asdict/astuple too!\ndef asdict(\n inst: AttrsInstance,\n recurse: bool = ...,\n filter: _FilterType[Any] | None = ...,\n dict_factory: type[Mapping[Any, Any]] = ...,\n retain_collection_types: bool = ...,\n value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,\n tuple_keys: bool | None = ...,\n) -> dict[str, Any]: ...\n\n# TODO: add support for returning NamedTuple from the mypy plugin\ndef astuple(\n inst: AttrsInstance,\n recurse: bool = ...,\n filter: _FilterType[Any] | None = ...,\n tuple_factory: type[Sequence[Any]] = ...,\n retain_collection_types: bool = ...,\n) -> tuple[Any, ...]: ...\ndef has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...\ndef assoc(inst: _T, **changes: Any) -> _T: ...\ndef evolve(inst: _T, **changes: Any) -> _T: ...\n\n# _config --\n\ndef set_run_validators(run: bool) -> None: ...\ndef get_run_validators() -> bool: ...\n\n# aliases --\n\ns = attributes = attrs\nib = attr = attrib\ndataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)\n | .venv\Lib\site-packages\attr\__init__.pyi | __init__.pyi | Other | 11,281 | 0.95 | 0.105398 | 0.133705 
| react-lib | 145 | 2023-11-18T12:35:08.780605 | MIT | false | 7b6ba085123d9b8577a66256e3ecc4b1 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\converters.cpython-313.pyc | converters.cpython-313.pyc | Other | 4,472 | 0.8 | 0.055556 | 0.013514 | python-kit | 86 | 2024-07-05T16:40:33.150903 | Apache-2.0 | false | 6c691ca9a38af4a68591d36406ed4266 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\exceptions.cpython-313.pyc | exceptions.cpython-313.pyc | Other | 3,675 | 0.95 | 0.081967 | 0 | python-kit | 909 | 2024-08-11T13:47:41.139418 | Apache-2.0 | false | d55e51aa33b80b6c5a03878af1099b70 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\filters.cpython-313.pyc | filters.cpython-313.pyc | Other | 2,979 | 0.8 | 0.022222 | 0 | python-kit | 908 | 2024-09-25T10:03:35.338631 | BSD-3-Clause | false | 0c25d4827a68027ee5d184f1acf7027f |
\n\n | .venv\Lib\site-packages\attr\__pycache__\setters.cpython-313.pyc | setters.cpython-313.pyc | Other | 1,885 | 0.7 | 0.107143 | 0 | awesome-app | 473 | 2024-12-19T17:37:34.809597 | GPL-3.0 | false | f366b0a0f5c0f7fe20f51a3abe445775 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\validators.cpython-313.pyc | validators.cpython-313.pyc | Other | 25,865 | 0.95 | 0.107895 | 0.003185 | react-lib | 497 | 2024-06-16T04:59:24.487770 | Apache-2.0 | false | 3c1a20e163e57cfef31cf02c221c6e98 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_cmp.cpython-313.pyc | _cmp.cpython-313.pyc | Other | 4,992 | 0.95 | 0.084337 | 0 | node-utils | 41 | 2025-06-19T07:10:57.451846 | MIT | false | c28643b07a313cbc5b2edda867f9356b |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_compat.cpython-313.pyc | _compat.cpython-313.pyc | Other | 3,552 | 0.95 | 0.151515 | 0 | react-lib | 562 | 2024-10-17T00:16:25.515701 | Apache-2.0 | false | 3dde2784632d2fa8d4dde28d62e5762e |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_config.cpython-313.pyc | _config.cpython-313.pyc | Other | 1,085 | 0.8 | 0 | 0 | react-lib | 655 | 2024-11-09T07:27:07.337376 | Apache-2.0 | false | 04d2e582760248d375b161d1306ee48d |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_funcs.cpython-313.pyc | _funcs.cpython-313.pyc | Other | 13,333 | 0.95 | 0.112319 | 0.013158 | vue-tools | 825 | 2023-11-21T06:16:46.562358 | Apache-2.0 | false | 8b9ce25d1b0890bd2843558d1560c8d6 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_make.cpython-313.pyc | _make.cpython-313.pyc | Other | 99,469 | 0.75 | 0.090015 | 0.018456 | node-utils | 931 | 2024-09-07T06:34:09.403953 | BSD-3-Clause | false | 176a6004721f069c4b98f1f6e84a88b3 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_next_gen.cpython-313.pyc | _next_gen.cpython-313.pyc | Other | 22,956 | 0.95 | 0.144404 | 0.027149 | python-kit | 356 | 2024-02-11T03:13:36.386948 | BSD-3-Clause | false | 7f326c1a1562824f277219a9598f93d6 |
\n\n | .venv\Lib\site-packages\attr\__pycache__\_version_info.cpython-313.pyc | _version_info.cpython-313.pyc | Other | 3,047 | 0.8 | 0 | 0 | awesome-app | 898 | 2023-12-09T12:47:15.172783 | BSD-3-Clause | false | d9f27f09c5eaaef471bdb61b223d953a |
\n\n | .venv\Lib\site-packages\attr\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 2,670 | 0.95 | 0.033333 | 0 | react-lib | 621 | 2024-08-16T15:55:12.553660 | BSD-3-Clause | false | 30681f00092c7bbe9ec87520c6fdf2fe |
# SPDX-License-Identifier: MIT\n\nfrom attr.converters import * # noqa: F403\n | .venv\Lib\site-packages\attrs\converters.py | converters.py | Python | 76 | 0.75 | 0 | 0.5 | python-kit | 441 | 2024-11-01T08:51:39.165182 | BSD-3-Clause | false | a84a9bacae96a5151158dc8c708f2ab9 |
# SPDX-License-Identifier: MIT\n\nfrom attr.exceptions import * # noqa: F403\n | .venv\Lib\site-packages\attrs\exceptions.py | exceptions.py | Python | 76 | 0.75 | 0 | 0.5 | vue-tools | 744 | 2024-08-05T17:11:36.992040 | MIT | false | 97bb60b7a06744e84fb33731cfad7287 |
# SPDX-License-Identifier: MIT\n\nfrom attr.filters import * # noqa: F403\n | .venv\Lib\site-packages\attrs\filters.py | filters.py | Python | 73 | 0.75 | 0 | 0.5 | node-utils | 124 | 2024-12-25T18:51:04.010798 | BSD-3-Clause | false | d9d3a0b22ae66f8055addd295dac1b24 |
# SPDX-License-Identifier: MIT\n\nfrom attr.setters import * # noqa: F403\n | .venv\Lib\site-packages\attrs\setters.py | setters.py | Python | 73 | 0.75 | 0 | 0.5 | vue-tools | 979 | 2024-05-03T06:09:08.273636 | MIT | false | a9bf5b0b8fbf97f26fc0be8b20e412f6 |
# SPDX-License-Identifier: MIT\n\nfrom attr.validators import * # noqa: F403\n | .venv\Lib\site-packages\attrs\validators.py | validators.py | Python | 76 | 0.75 | 0 | 0.5 | awesome-app | 15 | 2024-09-16T05:03:07.901685 | GPL-3.0 | false | 6cb626f6d5a315338101956ba6725d04 |
# SPDX-License-Identifier: MIT\n\nfrom attr import (\n NOTHING,\n Attribute,\n AttrsInstance,\n Converter,\n Factory,\n NothingType,\n _make_getattr,\n assoc,\n cmp_using,\n define,\n evolve,\n field,\n fields,\n fields_dict,\n frozen,\n has,\n make_class,\n mutable,\n resolve_types,\n validate,\n)\nfrom attr._next_gen import asdict, astuple\n\nfrom . import converters, exceptions, filters, setters, validators\n\n\n__all__ = [\n "NOTHING",\n "Attribute",\n "AttrsInstance",\n "Converter",\n "Factory",\n "NothingType",\n "__author__",\n "__copyright__",\n "__description__",\n "__doc__",\n "__email__",\n "__license__",\n "__title__",\n "__url__",\n "__version__",\n "__version_info__",\n "asdict",\n "assoc",\n "astuple",\n "cmp_using",\n "converters",\n "define",\n "evolve",\n "exceptions",\n "field",\n "fields",\n "fields_dict",\n "filters",\n "frozen",\n "has",\n "make_class",\n "mutable",\n "resolve_types",\n "setters",\n "validate",\n "validators",\n]\n\n__getattr__ = _make_getattr(__name__)\n | .venv\Lib\site-packages\attrs\__init__.py | __init__.py | Python | 1,107 | 0.95 | 0 | 0.015625 | node-utils | 479 | 2024-04-21T09:35:41.407918 | Apache-2.0 | false | ae4868ccdd9e6ceb62c38da4948bb267 |
import sys\n\nfrom typing import (\n Any,\n Callable,\n Mapping,\n Sequence,\n overload,\n TypeVar,\n)\n\n# Because we need to type our own stuff, we have to make everything from\n# attr explicitly public too.\nfrom attr import __author__ as __author__\nfrom attr import __copyright__ as __copyright__\nfrom attr import __description__ as __description__\nfrom attr import __email__ as __email__\nfrom attr import __license__ as __license__\nfrom attr import __title__ as __title__\nfrom attr import __url__ as __url__\nfrom attr import __version__ as __version__\nfrom attr import __version_info__ as __version_info__\nfrom attr import assoc as assoc\nfrom attr import Attribute as Attribute\nfrom attr import AttrsInstance as AttrsInstance\nfrom attr import cmp_using as cmp_using\nfrom attr import converters as converters\nfrom attr import Converter as Converter\nfrom attr import evolve as evolve\nfrom attr import exceptions as exceptions\nfrom attr import Factory as Factory\nfrom attr import fields as fields\nfrom attr import fields_dict as fields_dict\nfrom attr import filters as filters\nfrom attr import has as has\nfrom attr import make_class as make_class\nfrom attr import NOTHING as NOTHING\nfrom attr import resolve_types as resolve_types\nfrom attr import setters as setters\nfrom attr import validate as validate\nfrom attr import validators as validators\nfrom attr import attrib, asdict as asdict, astuple as astuple\nfrom attr import NothingType as NothingType\n\nif sys.version_info >= (3, 11):\n from typing import dataclass_transform\nelse:\n from typing_extensions import dataclass_transform\n\n_T = TypeVar("_T")\n_C = TypeVar("_C", bound=type)\n\n_EqOrderType = bool | Callable[[Any], Any]\n_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]\n_CallableConverterType = Callable[[Any], Any]\n_ConverterType = _CallableConverterType | Converter[Any, Any]\n_ReprType = Callable[[Any], str]\n_ReprArgType = bool | _ReprType\n_OnSetAttrType = Callable[[Any, 
"Attribute[Any]", Any], Any]\n_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType\n_FieldTransformer = Callable[\n [type, list["Attribute[Any]"]], list["Attribute[Any]"]\n]\n# FIXME: in reality, if multiple validators are passed they must be in a list\n# or tuple, but those are invariant and so would prevent subtypes of\n# _ValidatorType from working when passed in a list or tuple.\n_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]]\n\n@overload\ndef field(\n *,\n default: None = ...,\n validator: None = ...,\n repr: _ReprArgType = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n converter: None = ...,\n factory: None = ...,\n kw_only: bool = ...,\n eq: bool | None = ...,\n order: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n type: type | None = ...,\n) -> Any: ...\n\n# This form catches an explicit None or no default and infers the type from the\n# other arguments.\n@overload\ndef field(\n *,\n default: None = ...,\n validator: _ValidatorArgType[_T] | None = ...,\n repr: _ReprArgType = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n converter: _ConverterType\n | list[_ConverterType]\n | tuple[_ConverterType]\n | None = ...,\n factory: Callable[[], _T] | None = ...,\n kw_only: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n type: type | None = ...,\n) -> _T: ...\n\n# This form catches an explicit default argument.\n@overload\ndef field(\n *,\n default: _T,\n validator: _ValidatorArgType[_T] | None = ...,\n repr: _ReprArgType = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n converter: _ConverterType\n | list[_ConverterType]\n | tuple[_ConverterType]\n | None = ...,\n factory: Callable[[], _T] | None = ...,\n kw_only: bool 
= ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n type: type | None = ...,\n) -> _T: ...\n\n# This form covers type=non-Type: e.g. forward references (str), Any\n@overload\ndef field(\n *,\n default: _T | None = ...,\n validator: _ValidatorArgType[_T] | None = ...,\n repr: _ReprArgType = ...,\n hash: bool | None = ...,\n init: bool = ...,\n metadata: Mapping[Any, Any] | None = ...,\n converter: _ConverterType\n | list[_ConverterType]\n | tuple[_ConverterType]\n | None = ...,\n factory: Callable[[], _T] | None = ...,\n kw_only: bool = ...,\n eq: _EqOrderType | None = ...,\n order: _EqOrderType | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n alias: str | None = ...,\n type: type | None = ...,\n) -> Any: ...\n@overload\n@dataclass_transform(field_specifiers=(attrib, field))\ndef define(\n maybe_cls: _C,\n *,\n these: dict[str, Any] | None = ...,\n repr: bool = ...,\n unsafe_hash: bool | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: bool | None = ...,\n order: bool | None = ...,\n auto_detect: bool = ...,\n getstate_setstate: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n match_args: bool = ...,\n) -> _C: ...\n@overload\n@dataclass_transform(field_specifiers=(attrib, field))\ndef define(\n maybe_cls: None = ...,\n *,\n these: dict[str, Any] | None = ...,\n repr: bool = ...,\n unsafe_hash: bool | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: bool | None = ...,\n order: bool 
| None = ...,\n auto_detect: bool = ...,\n getstate_setstate: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n match_args: bool = ...,\n) -> Callable[[_C], _C]: ...\n\nmutable = define\n\n@overload\n@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))\ndef frozen(\n maybe_cls: _C,\n *,\n these: dict[str, Any] | None = ...,\n repr: bool = ...,\n unsafe_hash: bool | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: bool | None = ...,\n order: bool | None = ...,\n auto_detect: bool = ...,\n getstate_setstate: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n match_args: bool = ...,\n) -> _C: ...\n@overload\n@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))\ndef frozen(\n maybe_cls: None = ...,\n *,\n these: dict[str, Any] | None = ...,\n repr: bool = ...,\n unsafe_hash: bool | None = ...,\n hash: bool | None = ...,\n init: bool = ...,\n slots: bool = ...,\n frozen: bool = ...,\n weakref_slot: bool = ...,\n str: bool = ...,\n auto_attribs: bool = ...,\n kw_only: bool = ...,\n cache_hash: bool = ...,\n auto_exc: bool = ...,\n eq: bool | None = ...,\n order: bool | None = ...,\n auto_detect: bool = ...,\n getstate_setstate: bool | None = ...,\n on_setattr: _OnSetAttrArgType | None = ...,\n field_transformer: _FieldTransformer | None = ...,\n match_args: bool = ...,\n) -> Callable[[_C], _C]: ...\n | .venv\Lib\site-packages\attrs\__init__.pyi | __init__.pyi | Other | 7,931 | 0.95 | 0.038023 | 0.06746 | vue-tools | 677 | 2024-12-29T05:34:59.723604 | GPL-3.0 | false | c8b5be48bd106e544f66fa0c77427c88 |
\n\n | .venv\Lib\site-packages\attrs\__pycache__\converters.cpython-313.pyc | converters.cpython-313.pyc | Other | 221 | 0.7 | 0 | 0 | vue-tools | 76 | 2025-06-10T22:53:43.630207 | GPL-3.0 | false | a3b985ce04744bb142914e85ad91f357 |
\n\n | .venv\Lib\site-packages\attrs\__pycache__\exceptions.cpython-313.pyc | exceptions.cpython-313.pyc | Other | 221 | 0.7 | 0 | 0 | awesome-app | 64 | 2025-01-07T09:39:35.563543 | Apache-2.0 | false | 8defba4aa15a59eb867c7e834d771fd4 |
\n\n | .venv\Lib\site-packages\attrs\__pycache__\filters.cpython-313.pyc | filters.cpython-313.pyc | Other | 215 | 0.7 | 0 | 0 | react-lib | 183 | 2023-10-28T19:23:04.484000 | MIT | false | ec1d8eb79320faeeef054ea12ee5b885 |
\n\n | .venv\Lib\site-packages\attrs\__pycache__\setters.cpython-313.pyc | setters.cpython-313.pyc | Other | 215 | 0.7 | 0 | 0 | vue-tools | 813 | 2024-02-14T23:33:58.097691 | GPL-3.0 | false | 7dfc7b65a78b6e1a5c24ffdac6838df6 |
\n\n | .venv\Lib\site-packages\attrs\__pycache__\validators.cpython-313.pyc | validators.cpython-313.pyc | Other | 221 | 0.7 | 0 | 0 | awesome-app | 807 | 2025-01-23T15:20:58.788764 | MIT | false | d8b1b7be4f1c131e2df0b5ee3a471dd0 |
\n\n | .venv\Lib\site-packages\attrs\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 1,136 | 0.7 | 0 | 0 | awesome-app | 117 | 2023-12-25T18:55:31.888956 | GPL-3.0 | false | 12c53eb73465ee7302548ad1916df2ef |
pip\n | .venv\Lib\site-packages\attrs-25.3.0.dist-info\INSTALLER | INSTALLER | Other | 4 | 0.5 | 0 | 0 | awesome-app | 320 | 2024-01-04T00:45:59.784272 | MIT | false | 365c9bfeb7d89244f2ce01c1de44cb85 |
Metadata-Version: 2.4\nName: attrs\nVersion: 25.3.0\nSummary: Classes Without Boilerplate\nProject-URL: Documentation, https://www.attrs.org/\nProject-URL: Changelog, https://www.attrs.org/en/stable/changelog.html\nProject-URL: GitHub, https://github.com/python-attrs/attrs\nProject-URL: Funding, https://github.com/sponsors/hynek\nProject-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi\nAuthor-email: Hynek Schlawack <hs@ox.cx>\nLicense-Expression: MIT\nLicense-File: LICENSE\nKeywords: attribute,boilerplate,class\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Programming Language :: Python :: 3.8\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Programming Language :: Python :: 3.13\nClassifier: Programming Language :: Python :: Implementation :: CPython\nClassifier: Programming Language :: Python :: Implementation :: PyPy\nClassifier: Typing :: Typed\nRequires-Python: >=3.8\nProvides-Extra: benchmark\nRequires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'benchmark'\nRequires-Dist: hypothesis; extra == 'benchmark'\nRequires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'benchmark'\nRequires-Dist: pympler; extra == 'benchmark'\nRequires-Dist: pytest-codspeed; extra == 'benchmark'\nRequires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'benchmark'\nRequires-Dist: pytest-xdist[psutil]; extra == 'benchmark'\nRequires-Dist: pytest>=4.3.0; extra == 'benchmark'\nProvides-Extra: cov\nRequires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'cov'\nRequires-Dist: coverage[toml]>=5.3; extra == 'cov'\nRequires-Dist: hypothesis; extra == 
'cov'\nRequires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'cov'\nRequires-Dist: pympler; extra == 'cov'\nRequires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'cov'\nRequires-Dist: pytest-xdist[psutil]; extra == 'cov'\nRequires-Dist: pytest>=4.3.0; extra == 'cov'\nProvides-Extra: dev\nRequires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'dev'\nRequires-Dist: hypothesis; extra == 'dev'\nRequires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'dev'\nRequires-Dist: pre-commit-uv; extra == 'dev'\nRequires-Dist: pympler; extra == 'dev'\nRequires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'dev'\nRequires-Dist: pytest-xdist[psutil]; extra == 'dev'\nRequires-Dist: pytest>=4.3.0; extra == 'dev'\nProvides-Extra: docs\nRequires-Dist: cogapp; extra == 'docs'\nRequires-Dist: furo; extra == 'docs'\nRequires-Dist: myst-parser; extra == 'docs'\nRequires-Dist: sphinx; extra == 'docs'\nRequires-Dist: sphinx-notfound-page; extra == 'docs'\nRequires-Dist: sphinxcontrib-towncrier; extra == 'docs'\nRequires-Dist: towncrier; extra == 'docs'\nProvides-Extra: tests\nRequires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'tests'\nRequires-Dist: hypothesis; extra == 'tests'\nRequires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'tests'\nRequires-Dist: pympler; extra == 'tests'\nRequires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'tests'\nRequires-Dist: pytest-xdist[psutil]; extra == 'tests'\nRequires-Dist: pytest>=4.3.0; extra == 'tests'\nProvides-Extra: tests-mypy\nRequires-Dist: mypy>=1.11.1; (platform_python_implementation == 
'CPython' and python_version >= '3.10') and extra == 'tests-mypy'\nRequires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.10') and extra == 'tests-mypy'\nDescription-Content-Type: text/markdown\n\n<p align="center">\n <a href="https://www.attrs.org/">\n <img src="https://raw.githubusercontent.com/python-attrs/attrs/main/docs/_static/attrs_logo.svg" width="35%" alt="attrs" />\n </a>\n</p>\n\n\n*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)).\n[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020!\n\nIts main goal is to help you to write **concise** and **correct** software without slowing down your code.\n\n\n## Sponsors\n\n*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek).\nEspecially those generously supporting us at the *The Organization* tier and higher:\n\n<!-- sponsor-break-begin -->\n\n<p align="center">\n\n<!-- [[[cog\nimport pathlib, tomllib\n\nfor sponsor in tomllib.loads(pathlib.Path("pyproject.toml").read_text())["tool"]["sponcon"]["sponsors"]:\n print(f'<a href="{sponsor["url"]}"><img title="{sponsor["title"]}" src="https://www.attrs.org/en/25.3.0/_static/sponsors/{sponsor["img"]}" width="190" /></a>')\n]]] -->\n<a href="https://www.variomedia.de/"><img title="Variomedia AG" src="https://www.attrs.org/en/25.3.0/_static/sponsors/Variomedia.svg" width="190" /></a>\n<a href="https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek"><img title="Tidelift" src="https://www.attrs.org/en/25.3.0/_static/sponsors/Tidelift.svg" width="190" 
/></a>\n<a href="https://klaviyo.com/"><img title="Klaviyo" src="https://www.attrs.org/en/25.3.0/_static/sponsors/Klaviyo.svg" width="190" /></a>\n<a href="https://privacy-solutions.org/"><img title="Privacy Solutions" src="https://www.attrs.org/en/25.3.0/_static/sponsors/Privacy-Solutions.svg" width="190" /></a>\n<a href="https://www.emsys-renewables.com/"><img title="emsys renewables" src="https://www.attrs.org/en/25.3.0/_static/sponsors/emsys-renewables.svg" width="190" /></a>\n<a href="https://filepreviews.io/"><img title="FilePreviews" src="https://www.attrs.org/en/25.3.0/_static/sponsors/FilePreviews.svg" width="190" /></a>\n<a href="https://polar.sh/"><img title="Polar" src="https://www.attrs.org/en/25.3.0/_static/sponsors/Polar.svg" width="190" /></a>\n<!-- [[[end]]] -->\n\n</p>\n\n<!-- sponsor-break-end -->\n\n<p align="center">\n <strong>Please consider <a href="https://github.com/sponsors/hynek">joining them</a> to help make <em>attrs</em>’s maintenance more sustainable!</strong>\n</p>\n\n<!-- teaser-end -->\n\n## Example\n\n*attrs* gives you a class decorator and a way to declaratively define the attributes on that class:\n\n<!-- code-begin -->\n\n```pycon\n>>> from attrs import asdict, define, make_class, Factory\n\n>>> @define\n... class SomeClass:\n... a_number: int = 42\n... list_of_numbers: list[int] = Factory(list)\n...\n... def hard_math(self, another_number):\n... 
return self.a_number + sum(self.list_of_numbers) * another_number\n\n\n>>> sc = SomeClass(1, [1, 2, 3])\n>>> sc\nSomeClass(a_number=1, list_of_numbers=[1, 2, 3])\n\n>>> sc.hard_math(3)\n19\n>>> sc == SomeClass(1, [1, 2, 3])\nTrue\n>>> sc != SomeClass(2, [3, 2, 1])\nTrue\n\n>>> asdict(sc)\n{'a_number': 1, 'list_of_numbers': [1, 2, 3]}\n\n>>> SomeClass()\nSomeClass(a_number=42, list_of_numbers=[])\n\n>>> C = make_class("C", ["a", "b"])\n>>> C("foo", "bar")\nC(a='foo', b='bar')\n```\n\nAfter *declaring* your attributes, *attrs* gives you:\n\n- a concise and explicit overview of the class's attributes,\n- a nice human-readable `__repr__`,\n- equality-checking methods,\n- an initializer,\n- and much more,\n\n*without* writing dull boilerplate code again and again and *without* runtime performance penalties.\n\n---\n\nThis example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0.\nThe classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**.\n\nCheck out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation!\n\n\n### Hate Type Annotations!?\n\nNo problem!\nTypes are entirely **optional** with *attrs*.\nSimply assign `attrs.field()` to the attributes instead of annotating them with types:\n\n```python\nfrom attrs import define, field\n\n@define\nclass SomeClass:\n a_number = field(default=42)\n list_of_numbers = field(factory=list)\n```\n\n\n## Data Classes\n\nOn the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*).\nIn practice it does a lot more and is more flexible.\nFor instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the 
initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger.\n\nFor more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice.\n\n\n## Project Information\n\n- [**Changelog**](https://www.attrs.org/en/stable/changelog.html)\n- [**Documentation**](https://www.attrs.org/)\n- [**PyPI**](https://pypi.org/project/attrs/)\n- [**Source Code**](https://github.com/python-attrs/attrs)\n- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md)\n- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs)\n- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)\n\n\n### *attrs* for Enterprise\n\nAvailable as part of the [Tidelift Subscription](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek).\n\nThe maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.\nSave time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.\n\n## Release Information\n\n### Changes\n\n- Restore support for generator-based `field_transformer`s.\n [#1417](https://github.com/python-attrs/attrs/issues/1417)\n\n\n\n---\n\n[Full changelog →](https://www.attrs.org/en/stable/changelog.html)\n | .venv\Lib\site-packages\attrs-25.3.0.dist-info\METADATA | METADATA | Other | 10,993 | 0.95 | 0.077586 | 0.067797 | python-kit | 173 | 2025-03-15T09:05:42.787991 | Apache-2.0 | false | 
70c3743909329f059ee883254e8bf64b |
attr/__init__.py,sha256=fOYIvt1eGSqQre4uCS3sJWKZ0mwAuC8UD6qba5OS9_U,2057\nattr/__init__.pyi,sha256=QIXnnHPoucmDWkbpNsWTP-cgJ1bn8le7DjyRa_wYdew,11281\nattr/__pycache__/__init__.cpython-313.pyc,,\nattr/__pycache__/_cmp.cpython-313.pyc,,\nattr/__pycache__/_compat.cpython-313.pyc,,\nattr/__pycache__/_config.cpython-313.pyc,,\nattr/__pycache__/_funcs.cpython-313.pyc,,\nattr/__pycache__/_make.cpython-313.pyc,,\nattr/__pycache__/_next_gen.cpython-313.pyc,,\nattr/__pycache__/_version_info.cpython-313.pyc,,\nattr/__pycache__/converters.cpython-313.pyc,,\nattr/__pycache__/exceptions.cpython-313.pyc,,\nattr/__pycache__/filters.cpython-313.pyc,,\nattr/__pycache__/setters.cpython-313.pyc,,\nattr/__pycache__/validators.cpython-313.pyc,,\nattr/_cmp.py,sha256=3Nn1TjxllUYiX_nJoVnEkXoDk0hM1DYKj5DE7GZe4i0,4117\nattr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368\nattr/_compat.py,sha256=4hlXbWhdDjQCDK6FKF1EgnZ3POiHgtpp54qE0nxaGHg,2704\nattr/_config.py,sha256=dGq3xR6fgZEF6UBt_L0T-eUHIB4i43kRmH0P28sJVw8,843\nattr/_funcs.py,sha256=5-tUKJtp3h5El55EcDl6GWXFp68fT8D8U7uCRN6497I,15854\nattr/_make.py,sha256=lBUPPmxiA1BeHzB6OlHoCEh--tVvM1ozXO8eXOa6g4c,96664\nattr/_next_gen.py,sha256=7FRkbtl_N017SuBhf_Vw3mw2c2pGZhtCGOzadgz7tp4,24395\nattr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469\nattr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121\nattr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209\nattr/converters.py,sha256=GlDeOzPeTFgeBBLbj9G57Ez5lAk68uhSALRYJ_exe84,3861\nattr/converters.pyi,sha256=orU2bff-VjQa2kMDyvnMQV73oJT2WRyQuw4ZR1ym1bE,643\nattr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977\nattr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539\nattr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795\nattr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208\nattr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\nattr
/setters.py,sha256=5-dcT63GQK35ONEzSgfXCkbB7pPkaR-qv15mm4PVSzQ,1617\nattr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584\nattr/validators.py,sha256=WaB1HLAHHqRHWsrv_K9H-sJ7ESil3H3Cmv2d8TtVZx4,20046\nattr/validators.pyi,sha256=s2WhKPqskxbsckJfKk8zOuuB088GfgpyxcCYSNFLqNU,2603\nattrs-25.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\nattrs-25.3.0.dist-info/METADATA,sha256=W38cREj7s1wqNf1fg4hVwZmL1xh0AdSp4IhtTMROinw,10993\nattrs-25.3.0.dist-info/RECORD,,\nattrs-25.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87\nattrs-25.3.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109\nattrs/__init__.py,sha256=qeQJZ4O08yczSn840v9bYOaZyRE81WsVi-QCrY3krCU,1107\nattrs/__init__.pyi,sha256=nZmInocjM7tHV4AQw0vxO_fo6oJjL_PonlV9zKKW8DY,7931\nattrs/__pycache__/__init__.cpython-313.pyc,,\nattrs/__pycache__/converters.cpython-313.pyc,,\nattrs/__pycache__/exceptions.cpython-313.pyc,,\nattrs/__pycache__/filters.cpython-313.pyc,,\nattrs/__pycache__/setters.cpython-313.pyc,,\nattrs/__pycache__/validators.cpython-313.pyc,,\nattrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76\nattrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76\nattrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73\nattrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\nattrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73\nattrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76\n | .venv\Lib\site-packages\attrs-25.3.0.dist-info\RECORD | RECORD | Other | 3,556 | 0.7 | 0 | 0 | node-utils | 959 | 2024-12-19T18:15:01.871308 | BSD-3-Clause | false | 8e67204ce7abc7ca81eb248e39dcb11c |
Wheel-Version: 1.0\nGenerator: hatchling 1.27.0\nRoot-Is-Purelib: true\nTag: py3-none-any\n | .venv\Lib\site-packages\attrs-25.3.0.dist-info\WHEEL | WHEEL | Other | 87 | 0.5 | 0 | 0 | react-lib | 551 | 2024-01-08T15:12:22.421904 | GPL-3.0 | false | e2fcb0ad9ea59332c808928b4b439e7a |
The MIT License (MIT)\n\nCopyright (c) 2015 Hynek Schlawack and the attrs contributors\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the "Software"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n | .venv\Lib\site-packages\attrs-25.3.0.dist-info\licenses\LICENSE | LICENSE | Other | 1,109 | 0.7 | 0 | 0 | vue-tools | 911 | 2024-02-27T23:09:14.564216 | GPL-3.0 | false | 5e55731824cf9205cfabeab9a0600887 |
from __future__ import annotations

from babel.core import get_global


def get_official_languages(territory: str, regional: bool = False, de_facto: bool = False) -> tuple[str, ...]:
    """
    Return the official language(s) of the given territory.

    Language codes, when any are known, are ordered by descending share of
    the territory's population.

    With the `regional` flag set, regionally official languages are included
    as well.

    With the `de_facto` flag set, "de facto" official languages are included
    as well.

    .. warning:: Note that the data is as up to date as the current version of the CLDR used
                 by Babel. If you need scientifically accurate information, use another source!

    :param territory: Territory code
    :type territory: str
    :param regional: Whether to return regionally official languages too
    :type regional: bool
    :param de_facto: Whether to return de-facto official languages too
    :type de_facto: bool
    :return: Tuple of language codes
    :rtype: tuple[str]
    """
    code = str(territory).upper()

    # Officiality statuses that qualify a language for inclusion.
    accepted_statuses = {"official"}
    if regional:
        accepted_statuses.add("official_regional")
    if de_facto:
        accepted_statuses.add("de_facto_official")

    info_by_language = get_global("territory_languages").get(code, {})
    # Rank (population share, language) pairs most-popular first.
    ranked = sorted(
        (
            (details['population_percent'], language)
            for language, details in info_by_language.items()
            if details.get('official_status') in accepted_statuses
        ),
        reverse=True,
    )
    return tuple(language for _, language in ranked)


def get_territory_language_info(territory: str) -> dict[str, dict[str, float | str | None]]:
    """
    Return a dictionary of per-language information for a territory.

    The dictionary is keyed by language code; the values are dicts with more information.

    The following keys are currently known for the values:

    * `population_percent`: The percentage of the territory's population speaking the
      language.
    * `official_status`: An optional string describing the officiality status of the language.
      Known values are "official", "official_regional" and "de_facto_official".

    .. warning:: Note that the data is as up to date as the current version of the CLDR used
                 by Babel. If you need scientifically accurate information, use another source!

    .. note:: Note that the format of the dict returned may change between Babel versions.

    See https://www.unicode.org/cldr/charts/latest/supplemental/territory_language_information.html

    :param territory: Territory code
    :type territory: str
    :return: Language information dictionary
    :rtype: dict[str, dict]
    """
    territory_data = get_global("territory_languages")
    # dict(...) makes the same shallow copy as .copy(), so callers can't
    # mutate the shared CLDR data.
    return dict(territory_data.get(str(territory).upper(), {}))
"""
    babel.lists
    ~~~~~~~~~~~

    Locale dependent formatting of lists.

    The default locale for the functions in this module is determined by the
    following environment variables, in that order:

    * ``LC_ALL``, and
    * ``LANG``

    :copyright: (c) 2015-2025 by the Babel Team.
    :license: BSD, see LICENSE for more details.
"""
from __future__ import annotations

import warnings
from collections.abc import Sequence
from typing import Literal

from babel.core import Locale, default_locale

# Captured once at import time so repeated calls don't re-read the
# environment.  TODO(3.0): Remove this.
_DEFAULT_LOCALE = default_locale()


def __getattr__(name):
    # Module-level __getattr__ (PEP 562): serve the deprecated public
    # DEFAULT_LOCALE constant with a warning instead of exposing it directly.
    if name == "DEFAULT_LOCALE":
        warnings.warn(
            "The babel.lists.DEFAULT_LOCALE constant is deprecated and will be removed.",
            DeprecationWarning,
            stacklevel=2,
        )
        return _DEFAULT_LOCALE
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


def format_list(
    lst: Sequence[str],
    style: Literal['standard', 'standard-short', 'or', 'or-short', 'unit', 'unit-short', 'unit-narrow'] = 'standard',
    locale: Locale | str | None = None,
) -> str:
    """
    Format the items in `lst` as a list.

    >>> format_list(['apples', 'oranges', 'pears'], locale='en')
    u'apples, oranges, and pears'
    >>> format_list(['apples', 'oranges', 'pears'], locale='zh')
    u'apples、oranges和pears'
    >>> format_list(['omena', 'peruna', 'aplari'], style='or', locale='fi')
    u'omena, peruna tai aplari'

    Not all styles are necessarily available in all locales.
    The function will attempt to fall back to replacement styles according to the rules
    set forth in the CLDR root XML file, and raise a ValueError if no suitable replacement
    can be found.

    The following text is verbatim from the Unicode TR35-49 spec [1].

    * standard:
      A typical 'and' list for arbitrary placeholders.
      eg. "January, February, and March"
    * standard-short:
      A short version of an 'and' list, suitable for use with short or abbreviated placeholder values.
      eg. "Jan., Feb., and Mar."
    * or:
      A typical 'or' list for arbitrary placeholders.
      eg. "January, February, or March"
    * or-short:
      A short version of an 'or' list.
      eg. "Jan., Feb., or Mar."
    * unit:
      A list suitable for wide units.
      eg. "3 feet, 7 inches"
    * unit-short:
      A list suitable for short units
      eg. "3 ft, 7 in"
    * unit-narrow:
      A list suitable for narrow units, where space on the screen is very limited.
      eg. "3′ 7″"

    [1]: https://www.unicode.org/reports/tr35/tr35-49/tr35-general.html#ListPatterns

    :param lst: a sequence of items to format in to a list
    :param style: the style to format the list with. See above for description.
    :param locale: the locale. Defaults to the system locale.
    """
    # Parse the locale first so an invalid locale raises even for empty input
    # (matches the long-standing behavior of this function).
    locale = Locale.parse(locale or _DEFAULT_LOCALE)
    if not lst:
        return ''
    if len(lst) == 1:
        return lst[0]

    patterns = _resolve_list_style(locale, style)

    # Some locales provide a dedicated two-item pattern.
    if len(lst) == 2 and '2' in patterns:
        return patterns['2'].format(*lst)

    # General case: fold the items left-to-right through the start/middle/end
    # patterns, e.g. end(middle(start(a, b), c), d).
    result = patterns['start'].format(lst[0], lst[1])
    for elem in lst[2:-1]:
        result = patterns['middle'].format(result, elem)
    result = patterns['end'].format(result, lst[-1])

    return result


# Based on CLDR 45's root.xml file's `<alias>`es.
# The root file defines both `standard` and `or`,
# so they're always available.
# TODO: It would likely be better to use the
#       babel.localedata.Alias mechanism for this,
#       but I'm not quite sure how it's supposed to
#       work with inheritance and data in the root.
_style_fallbacks = {
    "or-narrow": ["or-short", "or"],
    "or-short": ["or"],
    "standard-narrow": ["standard-short", "standard"],
    "standard-short": ["standard"],
    "unit": ["unit-short", "standard"],
    "unit-narrow": ["unit-short", "unit", "standard"],
    "unit-short": ["standard"],
}


def _resolve_list_style(locale: Locale, style: str):
    """Return the pattern dict for *style* in *locale*, following the CLDR
    fallback aliases in ``_style_fallbacks``.

    :raises ValueError: if neither the style nor any fallback is available.
    """
    # Use a distinct loop variable instead of shadowing `style` (the old
    # noqa'd B020 pattern), so the error below can name the style the caller
    # actually requested rather than the last fallback that was tried.
    for candidate in (style, *(_style_fallbacks.get(style, []))):
        if candidate in locale.list_patterns:
            return locale.list_patterns[candidate]
    raise ValueError(
        f"Locale {locale} does not support list formatting style {style!r} "
        f"(supported are {sorted(locale.list_patterns)})",
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.